commit - ee0223452ddad06224fbfbda668d9f8fce23ee24
commit + 245331a60d743b7b73ba3a8b15e6f4648273369f
blob - 09e198020d84a6bd4cd1202d0cb69c01c11543ca
blob + 5dd4981b8fcf093cd267f9bb2692e8a2c7679815
--- .github/workflows/python-distributions.yml
+++ .github/workflows/python-distributions.yml
run: pip install -U gpg
if: "matrix.os != 'windows-latest'"
- name: Run test suite
- run: python -m unittest dulwich.tests.test_suite
+ run: python -m unittest tests.test_suite
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
if: "matrix.os == 'ubuntu-latest'"
blob - 7686015fa2bbf283f7f74538de75f8266cf11cbd
blob + 21d0fafcaf8e83a4b6c6ab69d62fc9b9f4dafcef
--- .github/workflows/pythontest.yml
+++ .github/workflows/pythontest.yml
- name: Coverage test suite run
run: |
pip install --upgrade coverage
- python -m coverage run -p -m unittest dulwich.tests.test_suite
+ python -m coverage run -p -m unittest tests.test_suite
blob - 64964357e1f50efe6cb570df8d609b9a692a93d2
blob + dfee9e5c50d6c1f60a74082ce80eb2c3eb063059
--- .stestr.conf
+++ .stestr.conf
[DEFAULT]
-test_path=dulwich/tests
+test_path=tests
blob - 5043861248911e21e58e0b5deeb90c070fbf679a
blob + c2327f8f5e9d481c1bb5f2768d23199d09a34c89
--- .testr.conf
+++ .testr.conf
[DEFAULT]
-test_command=PYTHONPATH=. python -m subunit.run $IDOPTION $LISTOPT dulwich.tests.test_suite
+test_command=PYTHONPATH=. python3 -m subunit.run $IDOPTION $LISTOPT tests.test_suite
test_id_option=--load-list $IDFILE
test_list_option=--list
blob - aa937f428338799a928640e2e3ff2227be4195fd
blob + 3a4f1b22f235a1fa2e5b2e30ea354365de12a80b
--- Makefile
+++ Makefile
$(SETUP) install --root="$(DESTDIR)"
check:: build
- $(RUNTEST) dulwich.tests.test_suite
+ $(RUNTEST) tests.test_suite
check-tutorial:: build
- $(RUNTEST) dulwich.tests.tutorial_test_suite
+ $(RUNTEST) tests.tutorial_test_suite
check-nocompat:: build
- $(RUNTEST) dulwich.tests.nocompat_test_suite
+ $(RUNTEST) tests.nocompat_test_suite
check-compat:: build
- $(RUNTEST) dulwich.tests.compat_test_suite
+ $(RUNTEST) tests.compat_test_suite
check-pypy:: clean
$(MAKE) check-noextensions PYTHON=pypy
check-noextensions:: clean
- $(RUNTEST) dulwich.tests.test_suite
+ $(RUNTEST) tests.test_suite
check-contrib:: clean
$(RUNTEST) -v dulwich.contrib.test_suite
$(RUFF) check .
coverage:
- $(COVERAGE) run -m unittest dulwich.tests.test_suite dulwich.contrib.test_suite
+ $(COVERAGE) run -m unittest tests.test_suite dulwich.contrib.test_suite
coverage-html: coverage
$(COVERAGE) html
blob - cd09f9a77c0326149e70c7aca3dff9f6d65067d4
blob + 6e1c38f328ab7f0e78113eb783941cb595db46a0
--- NEWS
+++ NEWS
0.21.8 UNRELEASED
+
+ * Move tests to root. (Jelmer Vernooij, #1024)
* Convert the optional C implementations to Rust.
(Jelmer Vernooij)
blob - 4976b90a36c6fc3693937aa1aad00137c503c98b
blob + c9a47080133cd0d1a0686d9ce3cfb6a772643102
--- dulwich/contrib/README.swift.rst
+++ dulwich/contrib/README.swift.rst
There is no need to have a Swift cluster running to run the unitests.
Just run the following command in the Dulwich source directory::
- $ PYTHONPATH=. python -m dulwich.contrib.test_swift
+ $ PYTHONPATH=. python -m tests.contrib.test_swift
How to start functional tests
-----------------------------
cluster. To run those functional tests you need a properly configured
configuration file. The tests can be run as follow::
- $ DULWICH_SWIFT_CFG=/etc/swift-dul.conf PYTHONPATH=. python -m dulwich.contrib.test_swift_smoke
+ $ DULWICH_SWIFT_CFG=/etc/swift-dul.conf PYTHONPATH=. python -m tests.contrib.test_swift_smoke
How to install
--------------
blob - cc23d2af193e49247691ff96501ea8380ebb7aff
blob + 4a083036d1d7199bf46e99dd2148982f1d4f1d6c
--- dulwich/contrib/__init__.py
+++ dulwich/contrib/__init__.py
# License, Version 2.0.
#
-
-def test_suite():
- import unittest
-
- names = [
- "paramiko_vendor",
- "release_robot",
- "swift",
- ]
- module_names = ["dulwich.contrib.test_" + name for name in names]
- loader = unittest.TestLoader()
- return loader.loadTestsFromNames(module_names)
blob - 496987e20be3c5d236fe6e6c92b4bd371b0b99e7 (mode 644)
blob + /dev/null
--- dulwich/contrib/test_paramiko_vendor.py
+++ /dev/null
-# test_paramiko_vendor.py
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for paramiko_vendor."""
-
-import socket
-import threading
-from io import StringIO
-from unittest import skipIf
-
-from dulwich.tests import TestCase
-
-try:
- import paramiko
-except ImportError:
- has_paramiko = False
-else:
- has_paramiko = True
- from .paramiko_vendor import ParamikoSSHVendor
-
- class Server(paramiko.ServerInterface):
- """http://docs.paramiko.org/en/2.4/api/server.html."""
-
- def __init__(self, commands, *args, **kwargs) -> None:
- super().__init__(*args, **kwargs)
- self.commands = commands
-
- def check_channel_exec_request(self, channel, command):
- self.commands.append(command)
- return True
-
- def check_auth_password(self, username, password):
- if username == USER and password == PASSWORD:
- return paramiko.AUTH_SUCCESSFUL
- return paramiko.AUTH_FAILED
-
- def check_auth_publickey(self, username, key):
- pubkey = paramiko.RSAKey.from_private_key(StringIO(CLIENT_KEY))
- if username == USER and key == pubkey:
- return paramiko.AUTH_SUCCESSFUL
- return paramiko.AUTH_FAILED
-
- def check_channel_request(self, kind, chanid):
- if kind == "session":
- return paramiko.OPEN_SUCCEEDED
- return paramiko.OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED
-
- def get_allowed_auths(self, username):
- return "password,publickey"
-
-
-USER = "testuser"
-PASSWORD = "test"
-SERVER_KEY = """\
------BEGIN RSA PRIVATE KEY-----
-MIIEpAIBAAKCAQEAy/L1sSYAzxsMprtNXW4u/1jGXXkQmQ2xtmKVlR+RlIL3a1BH
-bzTpPlZyjltAAwzIP8XRh0iJFKz5y3zSQChhX47ZGN0NvQsVct8R+YwsUonwfAJ+
-JN0KBKKvC8fPHlzqBr3gX+ZxqsFH934tQ6wdQPH5eQWtdM8L826lMsH1737uyTGk
-+mCSDjL3c6EzY83g7qhkJU2R4qbi6ne01FaWADzG8sOzXnHT+xpxtk8TTT8yCVUY
-MmBNsSoA/ka3iWz70ghB+6Xb0WpFJZXWq1oYovviPAfZGZSrxBZMxsWMye70SdLl
-TqsBEt0+miIcm9s0fvjWvQuhaHX6mZs5VO4r5QIDAQABAoIBAGYqeYWaYgFdrYLA
-hUrubUCg+g3NHdFuGL4iuIgRXl4lFUh+2KoOuWDu8Uf60iA1AQNhV0sLvQ/Mbv3O
-s4xMLisuZfaclctDiCUZNenqnDFkxEF7BjH1QJV94W5nU4wEQ3/JEmM4D2zYkfKb
-FJW33JeyH6TOgUvohDYYEU1R+J9V8qA243p+ui1uVtNI6Pb0TXJnG5y9Ny4vkSWH
-Fi0QoMPR1r9xJ4SEearGzA/crb4SmmDTKhGSoMsT3d5ATieLmwcS66xWz8w4oFGJ
-yzDq24s4Fp9ccNjMf/xR8XRiekJv835gjEqwF9IXyvgOaq6XJ1iCqGPFDKa25nui
-JnEstOkCgYEA/ZXk7aIanvdeJlTqpX578sJfCnrXLydzE8emk1b7+5mrzGxQ4/pM
-PBQs2f8glT3t0O0mRX9NoRqnwrid88/b+cY4NCOICFZeasX336/gYQxyVeRLJS6Z
-hnGEQqry8qS7PdKAyeHMNmZFrUh4EiHiObymEfQS+mkRUObn0cGBTw8CgYEAzeQU
-D2baec1DawjppKaRynAvWjp+9ry1lZx9unryKVRwjRjkEpw+b3/+hdaF1IvsVSce
-cNj+6W2guZ2tyHuPhZ64/4SJVyE2hKDSKD4xTb2nVjsMeN0bLD2UWXC9mwbx8nWa
-2tmtUZ7a/okQb2cSdosJinRewLNqXIsBXamT1csCgYEA0cXb2RCOQQ6U3dTFPx4A
-3vMXuA2iUKmrsqMoEx6T2LBow/Sefdkik1iFOdipVYwjXP+w9zC2QR1Rxez/DR/X
-8ymceNUjxPHdrSoTQQG29dFcC92MpDeGXQcuyA+uZjcLhbrLOzYEvsOfxBb87NMG
-14hNQPDNekTMREafYo9WrtUCgYAREK54+FVzcwf7fymedA/xb4r9N4v+d3W1iNsC
-8d3Qfyc1CrMct8aVB07ZWQaOr2pPRIbJY7L9NhD0UZVt4I/sy1MaGqonhqE2LP4+
-R6legDG2e/50ph7yc8gwAaA1kUXMiuLi8Nfkw/3yyvmJwklNegi4aRzRbA2Mzhi2
-4q9WMQKBgQCb0JNyxHG4pvLWCF/j0Sm1FfvrpnqSv5678n1j4GX7Ka/TubOK1Y4K
-U+Oib7dKa/zQMWehVFNTayrsq6bKVZ6q7zG+IHiRLw4wjeAxREFH6WUjDrn9vl2l
-D48DKbBuBwuVOJWyq3qbfgJXojscgNQklrsPdXVhDwOF0dYxP89HnA==
------END RSA PRIVATE KEY-----"""
-CLIENT_KEY = """\
------BEGIN RSA PRIVATE KEY-----
-MIIEpAIBAAKCAQEAxvREKSElPOm/0z/nPO+j5rk2tjdgGcGc7We1QZ6TRXYLu7nN
-GeEFIL4p8N1i6dmB+Eydt7xqCU79MWD6Yy4prFe1+/K1wCDUxIbFMxqQcX5zjJzd
-i8j8PbcaUlVhP/OkjtkSxrXaGDO1BzfdV4iEBtTV/2l3zmLKJlt3jnOHLczP24CB
-DTQKp3rKshbRefzot9Y+wnaK692RsYgsyo9YEP0GyWKG9topCHk13r46J6vGLeuj
-ryUKqmbLJkzbJbIcEqwTDo5iHaCVqaMr5Hrb8BdMucSseqZQJsXSd+9tdRcIblUQ
-38kZjmFMm4SFbruJcpZCNM2wNSZPIRX+3eiwNwIDAQABAoIBAHSacOBSJsr+jIi5
-KUOTh9IPtzswVUiDKwARCjB9Sf8p4lKR4N1L/n9kNJyQhApeikgGT2GCMftmqgoo
-tlculQoHFgemBlOmak0MV8NNzF5YKEy/GzF0CDH7gJfEpoyetVFrdA+2QS5yD6U9
-XqKQxiBi2VEqdScmyyeT8AwzNYTnPeH/DOEcnbdRjqiy/CD79F49CQ1lX1Fuqm0K
-I7BivBH1xo/rVnUP4F+IzocDqoga+Pjdj0LTXIgJlHQDSbhsQqWujWQDDuKb+MAw
-sNK4Zf8ErV3j1PyA7f/M5LLq6zgstkW4qikDHo4SpZX8kFOO8tjqb7kujj7XqeaB
-CxqrOTECgYEA73uWkrohcmDJ4KqbuL3tbExSCOUiaIV+sT1eGPNi7GCmXD4eW5Z4
-75v2IHymW83lORSu/DrQ6sKr1nkuRpqr2iBzRmQpl/H+wahIhBXlnJ25uUjDsuPO
-1Pq2LcmyD+jTxVnmbSe/q7O09gZQw3I6H4+BMHmpbf8tC97lqimzpJ0CgYEA1K0W
-ZL70Xtn9quyHvbtae/BW07NZnxvUg4UaVIAL9Zu34JyplJzyzbIjrmlDbv6aRogH
-/KtuG9tfbf55K/jjqNORiuRtzt1hUN1ye4dyW7tHx2/7lXdlqtyK40rQl8P0kqf8
-zaS6BqjnobgSdSpg32rWoL/pcBHPdJCJEgQ8zeMCgYEA0/PK8TOhNIzrP1dgGSKn
-hkkJ9etuB5nW5mEM7gJDFDf6JPupfJ/xiwe6z0fjKK9S57EhqgUYMB55XYnE5iIw
-ZQ6BV9SAZ4V7VsRs4dJLdNC3tn/rDGHJBgCaym2PlbsX6rvFT+h1IC8dwv0V79Ui
-Ehq9WTzkMoE8yhvNokvkPZUCgYEAgBAFxv5xGdh79ftdtXLmhnDvZ6S8l6Fjcxqo
-Ay/jg66Tp43OU226iv/0mmZKM8Dd1xC8dnon4GBVc19jSYYiWBulrRPlx0Xo/o+K
-CzZBN1lrXH1i6dqufpc0jq8TMf/N+q1q/c1uMupsKCY1/xVYpc+ok71b7J7c49zQ
-nOeuUW8CgYA9Infooy65FTgbzca0c9kbCUBmcAPQ2ItH3JcPKWPQTDuV62HcT00o
-fZdIV47Nez1W5Clk191RMy8TXuqI54kocciUWpThc6j44hz49oUueb8U4bLcEHzA
-WxtWBWHwxfSmqgTXilEA3ALJp0kNolLnEttnhENwJpZHlqtes0ZA4w==
------END RSA PRIVATE KEY-----"""
-
-
-@skipIf(not has_paramiko, "paramiko is not installed")
-class ParamikoSSHVendorTests(TestCase):
- def setUp(self):
- import paramiko.transport
-
- # re-enable server functionality for tests
- if hasattr(paramiko.transport, "SERVER_DISABLED_BY_GENTOO"):
- paramiko.transport.SERVER_DISABLED_BY_GENTOO = False
-
- self.commands = []
- socket.setdefaulttimeout(10)
- self.addCleanup(socket.setdefaulttimeout, None)
- self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- self.socket.bind(("127.0.0.1", 0))
- self.socket.listen(5)
- self.addCleanup(self.socket.close)
- self.port = self.socket.getsockname()[1]
- self.thread = threading.Thread(target=self._run)
- self.thread.start()
-
- def tearDown(self):
- self.thread.join()
-
- def _run(self):
- try:
- conn, addr = self.socket.accept()
- except OSError:
- return False
- self.transport = paramiko.Transport(conn)
- self.addCleanup(self.transport.close)
- host_key = paramiko.RSAKey.from_private_key(StringIO(SERVER_KEY))
- self.transport.add_server_key(host_key)
- server = Server(self.commands)
- self.transport.start_server(server=server)
-
- def test_run_command_password(self):
- vendor = ParamikoSSHVendor(
- allow_agent=False,
- look_for_keys=False,
- )
- vendor.run_command(
- "127.0.0.1",
- "test_run_command_password",
- username=USER,
- port=self.port,
- password=PASSWORD,
- )
-
- self.assertIn(b"test_run_command_password", self.commands)
-
- def test_run_command_with_privkey(self):
- key = paramiko.RSAKey.from_private_key(StringIO(CLIENT_KEY))
-
- vendor = ParamikoSSHVendor(
- allow_agent=False,
- look_for_keys=False,
- )
- vendor.run_command(
- "127.0.0.1",
- "test_run_command_with_privkey",
- username=USER,
- port=self.port,
- pkey=key,
- )
-
- self.assertIn(b"test_run_command_with_privkey", self.commands)
-
- def test_run_command_data_transfer(self):
- vendor = ParamikoSSHVendor(
- allow_agent=False,
- look_for_keys=False,
- )
- con = vendor.run_command(
- "127.0.0.1",
- "test_run_command_data_transfer",
- username=USER,
- port=self.port,
- password=PASSWORD,
- )
-
- self.assertIn(b"test_run_command_data_transfer", self.commands)
-
- channel = self.transport.accept(5)
- channel.send(b"stdout\n")
- channel.send_stderr(b"stderr\n")
- channel.close()
-
- # Fixme: it's return false
- # self.assertTrue(con.can_read())
-
- self.assertEqual(b"stdout\n", con.read(4096))
-
- # Fixme: it's return empty string
- # self.assertEqual(b'stderr\n', con.read_stderr(4096))
blob - 7e8ed261b8c47bc1d7b663a4574e91931880cc98 (mode 644)
blob + /dev/null
--- dulwich/contrib/test_release_robot.py
+++ /dev/null
-# release_robot.py
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for release_robot."""
-
-import datetime
-import os
-import re
-import shutil
-import tempfile
-import time
-import unittest
-from typing import ClassVar, Dict, List, Optional, Tuple
-
-from dulwich.contrib import release_robot
-
-from ..repo import Repo
-from ..tests.utils import make_commit, make_tag
-
-BASEDIR = os.path.abspath(os.path.dirname(__file__)) # this directory
-
-
-def gmtime_to_datetime(gmt):
- return datetime.datetime(*time.gmtime(gmt)[:6])
-
-
-class TagPatternTests(unittest.TestCase):
- """test tag patterns."""
-
- def test_tag_pattern(self):
- """Test tag patterns."""
- test_cases = {
- "0.3": "0.3",
- "v0.3": "0.3",
- "release0.3": "0.3",
- "Release-0.3": "0.3",
- "v0.3rc1": "0.3rc1",
- "v0.3-rc1": "0.3-rc1",
- "v0.3-rc.1": "0.3-rc.1",
- "version 0.3": "0.3",
- "version_0.3_rc_1": "0.3_rc_1",
- "v1": "1",
- "0.3rc1": "0.3rc1",
- }
- for testcase, version in test_cases.items():
- matches = re.match(release_robot.PATTERN, testcase)
- self.assertEqual(matches.group(1), version)
-
-
-class GetRecentTagsTest(unittest.TestCase):
- """test get recent tags."""
-
- # Git repo for dulwich project
- test_repo = os.path.join(BASEDIR, "dulwich_test_repo.zip")
- committer = b"Mark Mikofski <mark.mikofski@sunpowercorp.com>"
- test_tags: ClassVar[List[bytes]] = [b"v0.1a", b"v0.1"]
- tag_test_data: ClassVar[
- Dict[bytes, Tuple[int, bytes, Optional[Tuple[int, bytes]]]]
- ] = {
- test_tags[0]: (1484788003, b"3" * 40, None),
- test_tags[1]: (1484788314, b"1" * 40, (1484788401, b"2" * 40)),
- }
-
- @classmethod
- def setUpClass(cls):
- cls.projdir = tempfile.mkdtemp() # temporary project directory
- cls.repo = Repo.init(cls.projdir) # test repo
- obj_store = cls.repo.object_store # test repo object store
- # commit 1 ('2017-01-19T01:06:43')
- cls.c1 = make_commit(
- id=cls.tag_test_data[cls.test_tags[0]][1],
- commit_time=cls.tag_test_data[cls.test_tags[0]][0],
- message=b"unannotated tag",
- author=cls.committer,
- )
- obj_store.add_object(cls.c1)
- # tag 1: unannotated
- cls.t1 = cls.test_tags[0]
- cls.repo[b"refs/tags/" + cls.t1] = cls.c1.id # add unannotated tag
- # commit 2 ('2017-01-19T01:11:54')
- cls.c2 = make_commit(
- id=cls.tag_test_data[cls.test_tags[1]][1],
- commit_time=cls.tag_test_data[cls.test_tags[1]][0],
- message=b"annotated tag",
- parents=[cls.c1.id],
- author=cls.committer,
- )
- obj_store.add_object(cls.c2)
- # tag 2: annotated ('2017-01-19T01:13:21')
- cls.t2 = make_tag(
- cls.c2,
- id=cls.tag_test_data[cls.test_tags[1]][2][1],
- name=cls.test_tags[1],
- tag_time=cls.tag_test_data[cls.test_tags[1]][2][0],
- )
- obj_store.add_object(cls.t2)
- cls.repo[b"refs/heads/master"] = cls.c2.id
- cls.repo[b"refs/tags/" + cls.t2.name] = cls.t2.id # add annotated tag
-
- @classmethod
- def tearDownClass(cls):
- cls.repo.close()
- shutil.rmtree(cls.projdir)
-
- def test_get_recent_tags(self):
- """Test get recent tags."""
- tags = release_robot.get_recent_tags(self.projdir) # get test tags
- for tag, metadata in tags:
- tag = tag.encode("utf-8")
- test_data = self.tag_test_data[tag] # test data tag
- # test commit date, id and author name
- self.assertEqual(metadata[0], gmtime_to_datetime(test_data[0]))
- self.assertEqual(metadata[1].encode("utf-8"), test_data[1])
- self.assertEqual(metadata[2].encode("utf-8"), self.committer)
- # skip unannotated tags
- tag_obj = test_data[2]
- if not tag_obj:
- continue
- # tag date, id and name
- self.assertEqual(metadata[3][0], gmtime_to_datetime(tag_obj[0]))
- self.assertEqual(metadata[3][1].encode("utf-8"), tag_obj[1])
- self.assertEqual(metadata[3][2].encode("utf-8"), tag)
blob - 32015c225076acdc6e13bfd76727c7682622f88b (mode 644)
blob + /dev/null
--- dulwich/contrib/test_swift.py
+++ /dev/null
-# test_swift.py -- Unittests for the Swift backend.
-# Copyright (C) 2013 eNovance SAS <licensing@enovance.com>
-#
-# Author: Fabien Boucher <fabien.boucher@enovance.com>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for dulwich.contrib.swift."""
-
-import json
-import posixpath
-from io import BytesIO, StringIO
-from time import time
-from unittest import skipIf
-
-from dulwich.tests import TestCase
-
-from ..objects import Blob, Commit, Tag, Tree, parse_timezone
-from ..tests.test_object_store import ObjectStoreTests
-
-missing_libs = []
-
-try:
- import gevent # noqa: F401
-except ModuleNotFoundError:
- missing_libs.append("gevent")
-
-try:
- import geventhttpclient # noqa: F401
-except ModuleNotFoundError:
- missing_libs.append("geventhttpclient")
-
-try:
- from unittest.mock import patch
-except ModuleNotFoundError:
- missing_libs.append("mock")
-
-skipmsg = "Required libraries are not installed (%r)" % missing_libs
-
-
-if not missing_libs:
- from dulwich.contrib import swift
-
-config_file = """[swift]
-auth_url = http://127.0.0.1:8080/auth/%(version_str)s
-auth_ver = %(version_int)s
-username = test;tester
-password = testing
-region_name = %(region_name)s
-endpoint_type = %(endpoint_type)s
-concurrency = %(concurrency)s
-chunk_length = %(chunk_length)s
-cache_length = %(cache_length)s
-http_pool_length = %(http_pool_length)s
-http_timeout = %(http_timeout)s
-"""
-
-def_config_file = {
- "version_str": "v1.0",
- "version_int": 1,
- "concurrency": 1,
- "chunk_length": 12228,
- "cache_length": 1,
- "region_name": "test",
- "endpoint_type": "internalURL",
- "http_pool_length": 1,
- "http_timeout": 1,
-}
-
-
-def create_swift_connector(store={}):
- return lambda root, conf: FakeSwiftConnector(root, conf=conf, store=store)
-
-
-class Response:
- def __init__(self, headers={}, status=200, content=None) -> None:
- self.headers = headers
- self.status_code = status
- self.content = content
-
- def __getitem__(self, key):
- return self.headers[key]
-
- def items(self):
- return self.headers.items()
-
- def read(self):
- return self.content
-
-
-def fake_auth_request_v1(*args, **kwargs):
- ret = Response(
- {
- "X-Storage-Url": "http://127.0.0.1:8080/v1.0/AUTH_fakeuser",
- "X-Auth-Token": "12" * 10,
- },
- 200,
- )
- return ret
-
-
-def fake_auth_request_v1_error(*args, **kwargs):
- ret = Response({}, 401)
- return ret
-
-
-def fake_auth_request_v2(*args, **kwargs):
- s_url = "http://127.0.0.1:8080/v1.0/AUTH_fakeuser"
- resp = {
- "access": {
- "token": {"id": "12" * 10},
- "serviceCatalog": [
- {
- "type": "object-store",
- "endpoints": [
- {
- "region": "test",
- "internalURL": s_url,
- },
- ],
- },
- ],
- }
- }
- ret = Response(status=200, content=json.dumps(resp))
- return ret
-
-
-def create_commit(data, marker=b"Default", blob=None):
- if not blob:
- blob = Blob.from_string(b"The blob content " + marker)
- tree = Tree()
- tree.add(b"thefile_" + marker, 0o100644, blob.id)
- cmt = Commit()
- if data:
- assert isinstance(data[-1], Commit)
- cmt.parents = [data[-1].id]
- cmt.tree = tree.id
- author = b"John Doe " + marker + b" <john@doe.net>"
- cmt.author = cmt.committer = author
- tz = parse_timezone(b"-0200")[0]
- cmt.commit_time = cmt.author_time = int(time())
- cmt.commit_timezone = cmt.author_timezone = tz
- cmt.encoding = b"UTF-8"
- cmt.message = b"The commit message " + marker
- tag = Tag()
- tag.tagger = b"john@doe.net"
- tag.message = b"Annotated tag"
- tag.tag_timezone = parse_timezone(b"-0200")[0]
- tag.tag_time = cmt.author_time
- tag.object = (Commit, cmt.id)
- tag.name = b"v_" + marker + b"_0.1"
- return blob, tree, tag, cmt
-
-
-def create_commits(length=1, marker=b"Default"):
- data = []
- for i in range(length):
- _marker = (f"{marker}_{i}").encode()
- blob, tree, tag, cmt = create_commit(data, _marker)
- data.extend([blob, tree, tag, cmt])
- return data
-
-
-@skipIf(missing_libs, skipmsg)
-class FakeSwiftConnector:
- def __init__(self, root, conf, store=None) -> None:
- if store:
- self.store = store
- else:
- self.store = {}
- self.conf = conf
- self.root = root
- self.concurrency = 1
- self.chunk_length = 12228
- self.cache_length = 1
-
- def put_object(self, name, content):
- name = posixpath.join(self.root, name)
- if hasattr(content, "seek"):
- content.seek(0)
- content = content.read()
- self.store[name] = content
-
- def get_object(self, name, range=None):
- name = posixpath.join(self.root, name)
- if not range:
- try:
- return BytesIO(self.store[name])
- except KeyError:
- return None
- else:
- l, r = range.split("-")
- try:
- if not l:
- r = -int(r)
- return self.store[name][r:]
- else:
- return self.store[name][int(l) : int(r)]
- except KeyError:
- return None
-
- def get_container_objects(self):
- return [{"name": k.replace(self.root + "/", "")} for k in self.store]
-
- def create_root(self):
- if self.root in self.store.keys():
- pass
- else:
- self.store[self.root] = ""
-
- def get_object_stat(self, name):
- name = posixpath.join(self.root, name)
- if name not in self.store:
- return None
- return {"content-length": len(self.store[name])}
-
-
-@skipIf(missing_libs, skipmsg)
-class TestSwiftRepo(TestCase):
- def setUp(self):
- super().setUp()
- self.conf = swift.load_conf(file=StringIO(config_file % def_config_file))
-
- def test_init(self):
- store = {"fakerepo/objects/pack": ""}
- with patch(
- "dulwich.contrib.swift.SwiftConnector",
- new_callable=create_swift_connector,
- store=store,
- ):
- swift.SwiftRepo("fakerepo", conf=self.conf)
-
- def test_init_no_data(self):
- with patch(
- "dulwich.contrib.swift.SwiftConnector",
- new_callable=create_swift_connector,
- ):
- self.assertRaises(Exception, swift.SwiftRepo, "fakerepo", self.conf)
-
- def test_init_bad_data(self):
- store = {"fakerepo/.git/objects/pack": ""}
- with patch(
- "dulwich.contrib.swift.SwiftConnector",
- new_callable=create_swift_connector,
- store=store,
- ):
- self.assertRaises(Exception, swift.SwiftRepo, "fakerepo", self.conf)
-
- def test_put_named_file(self):
- store = {"fakerepo/objects/pack": ""}
- with patch(
- "dulwich.contrib.swift.SwiftConnector",
- new_callable=create_swift_connector,
- store=store,
- ):
- repo = swift.SwiftRepo("fakerepo", conf=self.conf)
- desc = b"Fake repo"
- repo._put_named_file("description", desc)
- self.assertEqual(repo.scon.store["fakerepo/description"], desc)
-
- def test_init_bare(self):
- fsc = FakeSwiftConnector("fakeroot", conf=self.conf)
- with patch(
- "dulwich.contrib.swift.SwiftConnector",
- new_callable=create_swift_connector,
- store=fsc.store,
- ):
- swift.SwiftRepo.init_bare(fsc, conf=self.conf)
- self.assertIn("fakeroot/objects/pack", fsc.store)
- self.assertIn("fakeroot/info/refs", fsc.store)
- self.assertIn("fakeroot/description", fsc.store)
-
-
-@skipIf(missing_libs, skipmsg)
-class TestSwiftInfoRefsContainer(TestCase):
- def setUp(self):
- super().setUp()
- content = (
- b"22effb216e3a82f97da599b8885a6cadb488b4c5\trefs/heads/master\n"
- b"cca703b0e1399008b53a1a236d6b4584737649e4\trefs/heads/dev"
- )
- self.store = {"fakerepo/info/refs": content}
- self.conf = swift.load_conf(file=StringIO(config_file % def_config_file))
- self.fsc = FakeSwiftConnector("fakerepo", conf=self.conf)
- self.object_store = {}
-
- def test_init(self):
- """info/refs does not exists."""
- irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store)
- self.assertEqual(len(irc._refs), 0)
- self.fsc.store = self.store
- irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store)
- self.assertIn(b"refs/heads/dev", irc.allkeys())
- self.assertIn(b"refs/heads/master", irc.allkeys())
-
- def test_set_if_equals(self):
- self.fsc.store = self.store
- irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store)
- irc.set_if_equals(
- b"refs/heads/dev",
- b"cca703b0e1399008b53a1a236d6b4584737649e4",
- b"1" * 40,
- )
- self.assertEqual(irc[b"refs/heads/dev"], b"1" * 40)
-
- def test_remove_if_equals(self):
- self.fsc.store = self.store
- irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store)
- irc.remove_if_equals(
- b"refs/heads/dev", b"cca703b0e1399008b53a1a236d6b4584737649e4"
- )
- self.assertNotIn(b"refs/heads/dev", irc.allkeys())
-
-
-@skipIf(missing_libs, skipmsg)
-class TestSwiftConnector(TestCase):
- def setUp(self):
- super().setUp()
- self.conf = swift.load_conf(file=StringIO(config_file % def_config_file))
- with patch("geventhttpclient.HTTPClient.request", fake_auth_request_v1):
- self.conn = swift.SwiftConnector("fakerepo", conf=self.conf)
-
- def test_init_connector(self):
- self.assertEqual(self.conn.auth_ver, "1")
- self.assertEqual(self.conn.auth_url, "http://127.0.0.1:8080/auth/v1.0")
- self.assertEqual(self.conn.user, "test:tester")
- self.assertEqual(self.conn.password, "testing")
- self.assertEqual(self.conn.root, "fakerepo")
- self.assertEqual(
- self.conn.storage_url, "http://127.0.0.1:8080/v1.0/AUTH_fakeuser"
- )
- self.assertEqual(self.conn.token, "12" * 10)
- self.assertEqual(self.conn.http_timeout, 1)
- self.assertEqual(self.conn.http_pool_length, 1)
- self.assertEqual(self.conn.concurrency, 1)
- self.conf.set("swift", "auth_ver", "2")
- self.conf.set("swift", "auth_url", "http://127.0.0.1:8080/auth/v2.0")
- with patch("geventhttpclient.HTTPClient.request", fake_auth_request_v2):
- conn = swift.SwiftConnector("fakerepo", conf=self.conf)
- self.assertEqual(conn.user, "tester")
- self.assertEqual(conn.tenant, "test")
- self.conf.set("swift", "auth_ver", "1")
- self.conf.set("swift", "auth_url", "http://127.0.0.1:8080/auth/v1.0")
- with patch("geventhttpclient.HTTPClient.request", fake_auth_request_v1_error):
- self.assertRaises(
- swift.SwiftException,
- lambda: swift.SwiftConnector("fakerepo", conf=self.conf),
- )
-
- def test_root_exists(self):
- with patch("geventhttpclient.HTTPClient.request", lambda *args: Response()):
- self.assertEqual(self.conn.test_root_exists(), True)
-
- def test_root_not_exists(self):
- with patch(
- "geventhttpclient.HTTPClient.request",
- lambda *args: Response(status=404),
- ):
- self.assertEqual(self.conn.test_root_exists(), None)
-
- def test_create_root(self):
- with patch(
- "dulwich.contrib.swift.SwiftConnector.test_root_exists",
- lambda *args: None,
- ):
- with patch("geventhttpclient.HTTPClient.request", lambda *args: Response()):
- self.assertEqual(self.conn.create_root(), None)
-
- def test_create_root_fails(self):
- with patch(
- "dulwich.contrib.swift.SwiftConnector.test_root_exists",
- lambda *args: None,
- ):
- with patch(
- "geventhttpclient.HTTPClient.request",
- lambda *args: Response(status=404),
- ):
- self.assertRaises(swift.SwiftException, self.conn.create_root)
-
- def test_get_container_objects(self):
- with patch(
- "geventhttpclient.HTTPClient.request",
- lambda *args: Response(content=json.dumps(({"name": "a"}, {"name": "b"}))),
- ):
- self.assertEqual(len(self.conn.get_container_objects()), 2)
-
- def test_get_container_objects_fails(self):
- with patch(
- "geventhttpclient.HTTPClient.request",
- lambda *args: Response(status=404),
- ):
- self.assertEqual(self.conn.get_container_objects(), None)
-
- def test_get_object_stat(self):
- with patch(
- "geventhttpclient.HTTPClient.request",
- lambda *args: Response(headers={"content-length": "10"}),
- ):
- self.assertEqual(self.conn.get_object_stat("a")["content-length"], "10")
-
- def test_get_object_stat_fails(self):
- with patch(
- "geventhttpclient.HTTPClient.request",
- lambda *args: Response(status=404),
- ):
- self.assertEqual(self.conn.get_object_stat("a"), None)
-
- def test_put_object(self):
- with patch(
- "geventhttpclient.HTTPClient.request",
- lambda *args, **kwargs: Response(),
- ):
- self.assertEqual(self.conn.put_object("a", BytesIO(b"content")), None)
-
- def test_put_object_fails(self):
- with patch(
- "geventhttpclient.HTTPClient.request",
- lambda *args, **kwargs: Response(status=400),
- ):
- self.assertRaises(
- swift.SwiftException,
- lambda: self.conn.put_object("a", BytesIO(b"content")),
- )
-
- def test_get_object(self):
- with patch(
- "geventhttpclient.HTTPClient.request",
- lambda *args, **kwargs: Response(content=b"content"),
- ):
- self.assertEqual(self.conn.get_object("a").read(), b"content")
- with patch(
- "geventhttpclient.HTTPClient.request",
- lambda *args, **kwargs: Response(content=b"content"),
- ):
- self.assertEqual(self.conn.get_object("a", range="0-6"), b"content")
-
- def test_get_object_fails(self):
- with patch(
- "geventhttpclient.HTTPClient.request",
- lambda *args, **kwargs: Response(status=404),
- ):
- self.assertEqual(self.conn.get_object("a"), None)
-
- def test_del_object(self):
- with patch("geventhttpclient.HTTPClient.request", lambda *args: Response()):
- self.assertEqual(self.conn.del_object("a"), None)
-
- def test_del_root(self):
- with patch(
- "dulwich.contrib.swift.SwiftConnector.del_object",
- lambda *args: None,
- ):
- with patch(
- "dulwich.contrib.swift.SwiftConnector." "get_container_objects",
- lambda *args: ({"name": "a"}, {"name": "b"}),
- ):
- with patch(
- "geventhttpclient.HTTPClient.request",
- lambda *args: Response(),
- ):
- self.assertEqual(self.conn.del_root(), None)
-
-
-@skipIf(missing_libs, skipmsg)
-class SwiftObjectStoreTests(ObjectStoreTests, TestCase):
- def setUp(self):
- TestCase.setUp(self)
- conf = swift.load_conf(file=StringIO(config_file % def_config_file))
- fsc = FakeSwiftConnector("fakerepo", conf=conf)
- self.store = swift.SwiftObjectStore(fsc)
blob - 2a19f6df2b680aeb87dcd5e56cdd3e03e22b9942 (mode 644)
blob + /dev/null
--- dulwich/contrib/test_swift_smoke.py
+++ /dev/null
-# test_smoke.py -- Functional tests for the Swift backend.
-# Copyright (C) 2013 eNovance SAS <licensing@enovance.com>
-#
-# Author: Fabien Boucher <fabien.boucher@enovance.com>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Start functional tests.
-
-A Swift installation must be available before
-starting those tests. The account and authentication method used
-during this functional tests must be changed in the configuration file
-passed as environment variable.
-The container used to create a fake repository is defined
-in cls.fakerepo and will be deleted after the tests.
-
-DULWICH_SWIFT_CFG=/tmp/conf.cfg PYTHONPATH=. python -m unittest \
- dulwich.tests_swift.test_smoke
-"""
-
-import os
-import shutil
-import tempfile
-import unittest
-
-import gevent
-from gevent import monkey
-
-monkey.patch_all()
-
-from dulwich import client, index, objects, repo, server # noqa: E402
-from dulwich.contrib import swift # noqa: E402
-
-
-class DulwichServer:
- """Start the TCPGitServer with Swift backend."""
-
- def __init__(self, backend, port) -> None:
- self.port = port
- self.backend = backend
-
- def run(self):
- self.server = server.TCPGitServer(self.backend, "localhost", port=self.port)
- self.job = gevent.spawn(self.server.serve_forever)
-
- def stop(self):
- self.server.shutdown()
- gevent.joinall((self.job,))
-
-
-class SwiftSystemBackend(server.Backend):
- def open_repository(self, path):
- return swift.SwiftRepo(path, conf=swift.load_conf())
-
-
-class SwiftRepoSmokeTest(unittest.TestCase):
- @classmethod
- def setUpClass(cls):
- cls.backend = SwiftSystemBackend()
- cls.port = 9148
- cls.server_address = "localhost"
- cls.fakerepo = "fakerepo"
- cls.th_server = DulwichServer(cls.backend, cls.port)
- cls.th_server.run()
- cls.conf = swift.load_conf()
-
- @classmethod
- def tearDownClass(cls):
- cls.th_server.stop()
-
- def setUp(self):
- self.scon = swift.SwiftConnector(self.fakerepo, self.conf)
- if self.scon.test_root_exists():
- try:
- self.scon.del_root()
- except swift.SwiftException:
- pass
- self.temp_d = tempfile.mkdtemp()
- if os.path.isdir(self.temp_d):
- shutil.rmtree(self.temp_d)
-
- def tearDown(self):
- if self.scon.test_root_exists():
- try:
- self.scon.del_root()
- except swift.SwiftException:
- pass
- if os.path.isdir(self.temp_d):
- shutil.rmtree(self.temp_d)
-
- def test_init_bare(self):
- swift.SwiftRepo.init_bare(self.scon, self.conf)
- self.assertTrue(self.scon.test_root_exists())
- obj = self.scon.get_container_objects()
- filtered = [
- o for o in obj if o["name"] == "info/refs" or o["name"] == "objects/pack"
- ]
- self.assertEqual(len(filtered), 2)
-
- def test_clone_bare(self):
- local_repo = repo.Repo.init(self.temp_d, mkdir=True)
- swift.SwiftRepo.init_bare(self.scon, self.conf)
- tcp_client = client.TCPGitClient(self.server_address, port=self.port)
- remote_refs = tcp_client.fetch(self.fakerepo, local_repo)
- # The remote repo is empty (no refs retrieved)
- self.assertEqual(remote_refs, None)
-
- def test_push_commit(self):
- def determine_wants(*args, **kwargs):
- return {"refs/heads/master": local_repo.refs["HEAD"]}
-
- local_repo = repo.Repo.init(self.temp_d, mkdir=True)
- # Nothing in the staging area
- local_repo.do_commit("Test commit", "fbo@localhost")
- sha = local_repo.refs.read_loose_ref("refs/heads/master")
- swift.SwiftRepo.init_bare(self.scon, self.conf)
- tcp_client = client.TCPGitClient(self.server_address, port=self.port)
- tcp_client.send_pack(
- self.fakerepo, determine_wants, local_repo.generate_pack_data
- )
- swift_repo = swift.SwiftRepo("fakerepo", self.conf)
- remote_sha = swift_repo.refs.read_loose_ref("refs/heads/master")
- self.assertEqual(sha, remote_sha)
-
- def test_push_branch(self):
- def determine_wants(*args, **kwargs):
- return {"refs/heads/mybranch": local_repo.refs["refs/heads/mybranch"]}
-
- local_repo = repo.Repo.init(self.temp_d, mkdir=True)
- # Nothing in the staging area
- local_repo.do_commit("Test commit", "fbo@localhost", ref="refs/heads/mybranch")
- sha = local_repo.refs.read_loose_ref("refs/heads/mybranch")
- swift.SwiftRepo.init_bare(self.scon, self.conf)
- tcp_client = client.TCPGitClient(self.server_address, port=self.port)
- tcp_client.send_pack(
- "/fakerepo", determine_wants, local_repo.generate_pack_data
- )
- swift_repo = swift.SwiftRepo(self.fakerepo, self.conf)
- remote_sha = swift_repo.refs.read_loose_ref("refs/heads/mybranch")
- self.assertEqual(sha, remote_sha)
-
- def test_push_multiple_branch(self):
- def determine_wants(*args, **kwargs):
- return {
- "refs/heads/mybranch": local_repo.refs["refs/heads/mybranch"],
- "refs/heads/master": local_repo.refs["refs/heads/master"],
- "refs/heads/pullr-108": local_repo.refs["refs/heads/pullr-108"],
- }
-
- local_repo = repo.Repo.init(self.temp_d, mkdir=True)
- # Nothing in the staging area
- local_shas = {}
- remote_shas = {}
- for branch in ("master", "mybranch", "pullr-108"):
- local_shas[branch] = local_repo.do_commit(
- "Test commit %s" % branch,
- "fbo@localhost",
- ref="refs/heads/%s" % branch,
- )
- swift.SwiftRepo.init_bare(self.scon, self.conf)
- tcp_client = client.TCPGitClient(self.server_address, port=self.port)
- tcp_client.send_pack(
- self.fakerepo, determine_wants, local_repo.generate_pack_data
- )
- swift_repo = swift.SwiftRepo("fakerepo", self.conf)
- for branch in ("master", "mybranch", "pullr-108"):
- remote_shas[branch] = swift_repo.refs.read_loose_ref(
- "refs/heads/%s" % branch
- )
- self.assertDictEqual(local_shas, remote_shas)
-
- def test_push_data_branch(self):
- def determine_wants(*args, **kwargs):
- return {"refs/heads/master": local_repo.refs["HEAD"]}
-
- local_repo = repo.Repo.init(self.temp_d, mkdir=True)
- os.mkdir(os.path.join(self.temp_d, "dir"))
- files = ("testfile", "testfile2", "dir/testfile3")
- i = 0
- for f in files:
- open(os.path.join(self.temp_d, f), "w").write("DATA %s" % i)
- i += 1
- local_repo.stage(files)
- local_repo.do_commit("Test commit", "fbo@localhost", ref="refs/heads/master")
- swift.SwiftRepo.init_bare(self.scon, self.conf)
- tcp_client = client.TCPGitClient(self.server_address, port=self.port)
- tcp_client.send_pack(
- self.fakerepo, determine_wants, local_repo.generate_pack_data
- )
- swift_repo = swift.SwiftRepo("fakerepo", self.conf)
- commit_sha = swift_repo.refs.read_loose_ref("refs/heads/master")
- otype, data = swift_repo.object_store.get_raw(commit_sha)
- commit = objects.ShaFile.from_raw_string(otype, data)
- otype, data = swift_repo.object_store.get_raw(commit._tree)
- tree = objects.ShaFile.from_raw_string(otype, data)
- objs = tree.items()
- objs_ = []
- for tree_entry in objs:
- objs_.append(swift_repo.object_store.get_raw(tree_entry.sha))
- # Blob
- self.assertEqual(objs_[1][1], "DATA 0")
- self.assertEqual(objs_[2][1], "DATA 1")
- # Tree
- self.assertEqual(objs_[0][0], 2)
-
- def test_clone_then_push_data(self):
- self.test_push_data_branch()
- shutil.rmtree(self.temp_d)
- local_repo = repo.Repo.init(self.temp_d, mkdir=True)
- tcp_client = client.TCPGitClient(self.server_address, port=self.port)
- remote_refs = tcp_client.fetch(self.fakerepo, local_repo)
- files = (
- os.path.join(self.temp_d, "testfile"),
- os.path.join(self.temp_d, "testfile2"),
- )
- local_repo["HEAD"] = remote_refs["refs/heads/master"]
- indexfile = local_repo.index_path()
- tree = local_repo["HEAD"].tree
- index.build_index_from_tree(
- local_repo.path, indexfile, local_repo.object_store, tree
- )
- for f in files:
- self.assertEqual(os.path.isfile(f), True)
-
- def determine_wants(*args, **kwargs):
- return {"refs/heads/master": local_repo.refs["HEAD"]}
-
- os.mkdir(os.path.join(self.temp_d, "test"))
- files = ("testfile11", "testfile22", "test/testfile33")
- i = 0
- for f in files:
- open(os.path.join(self.temp_d, f), "w").write("DATA %s" % i)
- i += 1
- local_repo.stage(files)
- local_repo.do_commit("Test commit", "fbo@localhost", ref="refs/heads/master")
- tcp_client.send_pack(
- "/fakerepo", determine_wants, local_repo.generate_pack_data
- )
-
- def test_push_remove_branch(self):
- def determine_wants(*args, **kwargs):
- return {
- "refs/heads/pullr-108": objects.ZERO_SHA,
- "refs/heads/master": local_repo.refs["refs/heads/master"],
- "refs/heads/mybranch": local_repo.refs["refs/heads/mybranch"],
- }
-
- self.test_push_multiple_branch()
- local_repo = repo.Repo(self.temp_d)
- tcp_client = client.TCPGitClient(self.server_address, port=self.port)
- tcp_client.send_pack(
- self.fakerepo, determine_wants, local_repo.generate_pack_data
- )
- swift_repo = swift.SwiftRepo("fakerepo", self.conf)
- self.assertNotIn("refs/heads/pullr-108", swift_repo.refs.allkeys())
-
- def test_push_annotated_tag(self):
- def determine_wants(*args, **kwargs):
- return {
- "refs/heads/master": local_repo.refs["HEAD"],
- "refs/tags/v1.0": local_repo.refs["refs/tags/v1.0"],
- }
-
- local_repo = repo.Repo.init(self.temp_d, mkdir=True)
- # Nothing in the staging area
- sha = local_repo.do_commit("Test commit", "fbo@localhost")
- otype, data = local_repo.object_store.get_raw(sha)
- commit = objects.ShaFile.from_raw_string(otype, data)
- tag = objects.Tag()
- tag.tagger = "fbo@localhost"
- tag.message = "Annotated tag"
- tag.tag_timezone = objects.parse_timezone("-0200")[0]
- tag.tag_time = commit.author_time
- tag.object = (objects.Commit, commit.id)
- tag.name = "v0.1"
- local_repo.object_store.add_object(tag)
- local_repo.refs["refs/tags/v1.0"] = tag.id
- swift.SwiftRepo.init_bare(self.scon, self.conf)
- tcp_client = client.TCPGitClient(self.server_address, port=self.port)
- tcp_client.send_pack(
- self.fakerepo, determine_wants, local_repo.generate_pack_data
- )
- swift_repo = swift.SwiftRepo(self.fakerepo, self.conf)
- tag_sha = swift_repo.refs.read_loose_ref("refs/tags/v1.0")
- otype, data = swift_repo.object_store.get_raw(tag_sha)
- rtag = objects.ShaFile.from_raw_string(otype, data)
- self.assertEqual(rtag.object[1], commit.id)
- self.assertEqual(rtag.id, tag.id)
-
-
-if __name__ == "__main__":
- unittest.main()
blob - 7fe34f7a969f5dca85908b43263c2c9c59f9b6c9 (mode 644)
blob + /dev/null
--- dulwich/tests/__init__.py
+++ /dev/null
-# __init__.py -- The tests for dulwich
-# Copyright (C) 2007 James Westby <jw+debian@jameswestby.net>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for Dulwich."""
-
-__all__ = [
- "SkipTest",
- "TestCase",
- "BlackboxTestCase",
- "skipIf",
- "expectedFailure",
-]
-
-import doctest
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-
-# If Python itself provides an exception, use that
-import unittest
-from typing import ClassVar, List
-from unittest import SkipTest, expectedFailure, skipIf
-from unittest import TestCase as _TestCase
-
-
-class TestCase(_TestCase):
- def setUp(self):
- super().setUp()
- self.overrideEnv("HOME", "/nonexistent")
- self.overrideEnv("GIT_CONFIG_NOSYSTEM", "1")
-
- def overrideEnv(self, name, value):
- def restore():
- if oldval is not None:
- os.environ[name] = oldval
- else:
- del os.environ[name]
-
- oldval = os.environ.get(name)
- if value is not None:
- os.environ[name] = value
- else:
- del os.environ[name]
- self.addCleanup(restore)
-
-
-class BlackboxTestCase(TestCase):
- """Blackbox testing."""
-
- # TODO(jelmer): Include more possible binary paths.
- bin_directories: ClassVar[List[str]] = [
- os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "bin")),
- "/usr/bin",
- "/usr/local/bin",
- ]
-
- def bin_path(self, name):
- """Determine the full path of a binary.
-
- Args:
- name: Name of the script
- Returns: Full path
- """
- for d in self.bin_directories:
- p = os.path.join(d, name)
- if os.path.isfile(p):
- return p
- else:
- raise SkipTest("Unable to find binary %s" % name)
-
- def run_command(self, name, args):
- """Run a Dulwich command.
-
- Args:
- name: Name of the command, as it exists in bin/
- args: Arguments to the command
- """
- env = dict(os.environ)
- env["PYTHONPATH"] = os.pathsep.join(sys.path)
-
- # Since they don't have any extensions, Windows can't recognize
- # executablility of the Python files in /bin. Even then, we'd have to
- # expect the user to set up file associations for .py files.
- #
- # Save us from all that headache and call python with the bin script.
- argv = [sys.executable, self.bin_path(name), *args]
- return subprocess.Popen(
- argv,
- stdout=subprocess.PIPE,
- stdin=subprocess.PIPE,
- stderr=subprocess.PIPE,
- env=env,
- )
-
-
-def self_test_suite():
- names = [
- "archive",
- "blackbox",
- "bundle",
- "client",
- "config",
- "credentials",
- "diff_tree",
- "fastexport",
- "file",
- "grafts",
- "graph",
- "greenthreads",
- "hooks",
- "ignore",
- "index",
- "lfs",
- "line_ending",
- "lru_cache",
- "mailmap",
- "objects",
- "objectspec",
- "object_store",
- "missing_obj_finder",
- "pack",
- "patch",
- "porcelain",
- "protocol",
- "reflog",
- "refs",
- "repository",
- "server",
- "stash",
- "utils",
- "walk",
- "web",
- ]
- module_names = ["dulwich.tests.test_" + name for name in names]
- loader = unittest.TestLoader()
- return loader.loadTestsFromNames(module_names)
-
-
-def tutorial_test_suite():
- tutorial = [
- "introduction",
- "file-format",
- "repo",
- "object-store",
- "remote",
- "conclusion",
- ]
- tutorial_files = [f"../../docs/tutorial/{name}.txt" for name in tutorial]
-
- to_restore = []
-
- def overrideEnv(name, value):
- oldval = os.environ.get(name)
- if value is not None:
- os.environ[name] = value
- else:
- del os.environ[name]
- to_restore.append((name, oldval))
-
- def setup(test):
- test.__old_cwd = os.getcwd()
- test.tempdir = tempfile.mkdtemp()
- test.globs.update({"tempdir": test.tempdir})
- os.chdir(test.tempdir)
- overrideEnv("HOME", "/nonexistent")
- overrideEnv("GIT_CONFIG_NOSYSTEM", "1")
-
- def teardown(test):
- os.chdir(test.__old_cwd)
- shutil.rmtree(test.tempdir)
- for name, oldval in to_restore:
- if oldval is not None:
- os.environ[name] = oldval
- else:
- del os.environ[name]
- to_restore.clear()
-
- return doctest.DocFileSuite(
- module_relative=True,
- package="dulwich.tests",
- setUp=setup,
- tearDown=teardown,
- *tutorial_files,
- )
-
-
-def nocompat_test_suite():
- result = unittest.TestSuite()
- result.addTests(self_test_suite())
- result.addTests(tutorial_test_suite())
- from dulwich.contrib import test_suite as contrib_test_suite
-
- result.addTests(contrib_test_suite())
- return result
-
-
-def compat_test_suite():
- result = unittest.TestSuite()
- from dulwich.tests.compat import test_suite as compat_test_suite
-
- result.addTests(compat_test_suite())
- return result
-
-
-def test_suite():
- result = unittest.TestSuite()
- result.addTests(self_test_suite())
- if sys.platform != "win32":
- result.addTests(tutorial_test_suite())
- from dulwich.tests.compat import test_suite as compat_test_suite
-
- result.addTests(compat_test_suite())
- from dulwich.contrib import test_suite as contrib_test_suite
-
- result.addTests(contrib_test_suite())
- return result
blob - 24747775c7203290f3407f5f91597142995c6d8e (mode 644)
blob + /dev/null
--- dulwich/tests/compat/__init__.py
+++ /dev/null
-# __init__.py -- Compatibility tests for dulwich
-# Copyright (C) 2010 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Compatibility tests for Dulwich."""
-
-import unittest
-
-
-def test_suite():
- names = [
- "client",
- "pack",
- "patch",
- "porcelain",
- "repository",
- "server",
- "utils",
- "web",
- ]
- module_names = ["dulwich.tests.compat.test_" + name for name in names]
- result = unittest.TestSuite()
- loader = unittest.TestLoader()
- suite = loader.loadTestsFromNames(module_names)
- result.addTests(suite)
- return result
blob - 59a9beeaaf9a6cc08d34484d74257d70d7526106 (mode 644)
blob + /dev/null
--- dulwich/tests/compat/server_utils.py
+++ /dev/null
-# server_utils.py -- Git server compatibility utilities
-# Copyright (C) 2010 Google, Inc.
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Utilities for testing git server compatibility."""
-
-import errno
-import os
-import shutil
-import socket
-import tempfile
-
-from ...objects import hex_to_sha
-from ...protocol import CAPABILITY_SIDE_BAND_64K
-from ...repo import Repo
-from ...server import ReceivePackHandler
-from ..utils import tear_down_repo
-from .utils import require_git_version, run_git_or_fail
-
-
-class _StubRepo:
- """A stub repo that just contains a path to tear down."""
-
- def __init__(self, name) -> None:
- temp_dir = tempfile.mkdtemp()
- self.path = os.path.join(temp_dir, name)
- os.mkdir(self.path)
-
- def close(self):
- pass
-
-
-def _get_shallow(repo):
- shallow_file = repo.get_named_file("shallow")
- if not shallow_file:
- return []
- shallows = []
- with shallow_file:
- for line in shallow_file:
- sha = line.strip()
- if not sha:
- continue
- hex_to_sha(sha)
- shallows.append(sha)
- return shallows
-
-
-class ServerTests:
- """Base tests for testing servers.
-
- Does not inherit from TestCase so tests are not automatically run.
- """
-
- min_single_branch_version = (
- 1,
- 7,
- 10,
- )
-
- def import_repos(self):
- self._old_repo = self.import_repo("server_old.export")
- self._new_repo = self.import_repo("server_new.export")
-
- def url(self, port):
- return f"{self.protocol}://localhost:{port}/"
-
- def branch_args(self, branches=None):
- if branches is None:
- branches = ["master", "branch"]
- return [f"{b}:{b}" for b in branches]
-
- def test_push_to_dulwich(self):
- self.import_repos()
- self.assertReposNotEqual(self._old_repo, self._new_repo)
- port = self._start_server(self._old_repo)
-
- run_git_or_fail(
- ["push", self.url(port), *self.branch_args()],
- cwd=self._new_repo.path,
- )
- self.assertReposEqual(self._old_repo, self._new_repo)
-
- def test_push_to_dulwich_no_op(self):
- self._old_repo = self.import_repo("server_old.export")
- self._new_repo = self.import_repo("server_old.export")
- self.assertReposEqual(self._old_repo, self._new_repo)
- port = self._start_server(self._old_repo)
-
- run_git_or_fail(
- ["push", self.url(port), *self.branch_args()],
- cwd=self._new_repo.path,
- )
- self.assertReposEqual(self._old_repo, self._new_repo)
-
- def test_push_to_dulwich_remove_branch(self):
- self._old_repo = self.import_repo("server_old.export")
- self._new_repo = self.import_repo("server_old.export")
- self.assertReposEqual(self._old_repo, self._new_repo)
- port = self._start_server(self._old_repo)
-
- run_git_or_fail(["push", self.url(port), ":master"], cwd=self._new_repo.path)
-
- self.assertEqual(list(self._old_repo.get_refs().keys()), [b"refs/heads/branch"])
-
- def test_fetch_from_dulwich(self):
- self.import_repos()
- self.assertReposNotEqual(self._old_repo, self._new_repo)
- port = self._start_server(self._new_repo)
-
- run_git_or_fail(
- ["fetch", self.url(port), *self.branch_args()],
- cwd=self._old_repo.path,
- )
- # flush the pack cache so any new packs are picked up
- self._old_repo.object_store._pack_cache_time = 0
- self.assertReposEqual(self._old_repo, self._new_repo)
-
- def test_fetch_from_dulwich_no_op(self):
- self._old_repo = self.import_repo("server_old.export")
- self._new_repo = self.import_repo("server_old.export")
- self.assertReposEqual(self._old_repo, self._new_repo)
- port = self._start_server(self._new_repo)
-
- run_git_or_fail(
- ["fetch", self.url(port), *self.branch_args()],
- cwd=self._old_repo.path,
- )
- # flush the pack cache so any new packs are picked up
- self._old_repo.object_store._pack_cache_time = 0
- self.assertReposEqual(self._old_repo, self._new_repo)
-
- def test_clone_from_dulwich_empty(self):
- old_repo_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, old_repo_dir)
- self._old_repo = Repo.init_bare(old_repo_dir)
- port = self._start_server(self._old_repo)
-
- new_repo_base_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, new_repo_base_dir)
- new_repo_dir = os.path.join(new_repo_base_dir, "empty_new")
- run_git_or_fail(["clone", self.url(port), new_repo_dir], cwd=new_repo_base_dir)
- new_repo = Repo(new_repo_dir)
- self.assertReposEqual(self._old_repo, new_repo)
-
- def test_lsremote_from_dulwich(self):
- self._repo = self.import_repo("server_old.export")
- port = self._start_server(self._repo)
- o = run_git_or_fail(["ls-remote", self.url(port)])
- self.assertEqual(len(o.split(b"\n")), 4)
-
- def test_new_shallow_clone_from_dulwich(self):
- require_git_version(self.min_single_branch_version)
- self._source_repo = self.import_repo("server_new.export")
- self._stub_repo = _StubRepo("shallow")
- self.addCleanup(tear_down_repo, self._stub_repo)
- port = self._start_server(self._source_repo)
-
- # Fetch at depth 1
- run_git_or_fail(
- [
- "clone",
- "--mirror",
- "--depth=1",
- "--no-single-branch",
- self.url(port),
- self._stub_repo.path,
- ]
- )
- clone = self._stub_repo = Repo(self._stub_repo.path)
- expected_shallow = [
- b"35e0b59e187dd72a0af294aedffc213eaa4d03ff",
- b"514dc6d3fbfe77361bcaef320c4d21b72bc10be9",
- ]
- self.assertEqual(expected_shallow, _get_shallow(clone))
- self.assertReposNotEqual(clone, self._source_repo)
-
- def test_shallow_clone_from_git_is_identical(self):
- require_git_version(self.min_single_branch_version)
- self._source_repo = self.import_repo("server_new.export")
- self._stub_repo_git = _StubRepo("shallow-git")
- self.addCleanup(tear_down_repo, self._stub_repo_git)
- self._stub_repo_dw = _StubRepo("shallow-dw")
- self.addCleanup(tear_down_repo, self._stub_repo_dw)
-
- # shallow clone using stock git, then using dulwich
- run_git_or_fail(
- [
- "clone",
- "--mirror",
- "--depth=1",
- "--no-single-branch",
- "file://" + self._source_repo.path,
- self._stub_repo_git.path,
- ]
- )
-
- port = self._start_server(self._source_repo)
- run_git_or_fail(
- [
- "clone",
- "--mirror",
- "--depth=1",
- "--no-single-branch",
- self.url(port),
- self._stub_repo_dw.path,
- ]
- )
-
- # compare the two clones; they should be equal
- self.assertReposEqual(
- Repo(self._stub_repo_git.path), Repo(self._stub_repo_dw.path)
- )
-
- def test_fetch_same_depth_into_shallow_clone_from_dulwich(self):
- require_git_version(self.min_single_branch_version)
- self._source_repo = self.import_repo("server_new.export")
- self._stub_repo = _StubRepo("shallow")
- self.addCleanup(tear_down_repo, self._stub_repo)
- port = self._start_server(self._source_repo)
-
- # Fetch at depth 2
- run_git_or_fail(
- [
- "clone",
- "--mirror",
- "--depth=2",
- "--no-single-branch",
- self.url(port),
- self._stub_repo.path,
- ]
- )
- clone = self._stub_repo = Repo(self._stub_repo.path)
-
- # Fetching at the same depth is a no-op.
- run_git_or_fail(
- ["fetch", "--depth=2", self.url(port), *self.branch_args()],
- cwd=self._stub_repo.path,
- )
- expected_shallow = [
- b"94de09a530df27ac3bb613aaecdd539e0a0655e1",
- b"da5cd81e1883c62a25bb37c4d1f8ad965b29bf8d",
- ]
- self.assertEqual(expected_shallow, _get_shallow(clone))
- self.assertReposNotEqual(clone, self._source_repo)
-
- def test_fetch_full_depth_into_shallow_clone_from_dulwich(self):
- require_git_version(self.min_single_branch_version)
- self._source_repo = self.import_repo("server_new.export")
- self._stub_repo = _StubRepo("shallow")
- self.addCleanup(tear_down_repo, self._stub_repo)
- port = self._start_server(self._source_repo)
-
- # Fetch at depth 2
- run_git_or_fail(
- [
- "clone",
- "--mirror",
- "--depth=2",
- "--no-single-branch",
- self.url(port),
- self._stub_repo.path,
- ]
- )
- clone = self._stub_repo = Repo(self._stub_repo.path)
-
- # Fetching at the same depth is a no-op.
- run_git_or_fail(
- ["fetch", "--depth=2", self.url(port), *self.branch_args()],
- cwd=self._stub_repo.path,
- )
-
- # The whole repo only has depth 4, so it should equal server_new.
- run_git_or_fail(
- ["fetch", "--depth=4", self.url(port), *self.branch_args()],
- cwd=self._stub_repo.path,
- )
- self.assertEqual([], _get_shallow(clone))
- self.assertReposEqual(clone, self._source_repo)
-
- def test_fetch_from_dulwich_issue_88_standard(self):
- # Basically an integration test to see that the ACK/NAK
- # generation works on repos with common head.
- self._source_repo = self.import_repo("issue88_expect_ack_nak_server.export")
- self._client_repo = self.import_repo("issue88_expect_ack_nak_client.export")
- port = self._start_server(self._source_repo)
-
- run_git_or_fail(["fetch", self.url(port), "master"], cwd=self._client_repo.path)
- self.assertObjectStoreEqual(
- self._source_repo.object_store, self._client_repo.object_store
- )
-
- def test_fetch_from_dulwich_issue_88_alternative(self):
- # likewise, but the case where the two repos have no common parent
- self._source_repo = self.import_repo("issue88_expect_ack_nak_other.export")
- self._client_repo = self.import_repo("issue88_expect_ack_nak_client.export")
- port = self._start_server(self._source_repo)
-
- self.assertRaises(
- KeyError,
- self._client_repo.get_object,
- b"02a14da1fc1fc13389bbf32f0af7d8899f2b2323",
- )
- run_git_or_fail(["fetch", self.url(port), "master"], cwd=self._client_repo.path)
- self.assertEqual(
- b"commit",
- self._client_repo.get_object(
- b"02a14da1fc1fc13389bbf32f0af7d8899f2b2323"
- ).type_name,
- )
-
- def test_push_to_dulwich_issue_88_standard(self):
- # Same thing, but we reverse the role of the server/client
- # and do a push instead.
- self._source_repo = self.import_repo("issue88_expect_ack_nak_client.export")
- self._client_repo = self.import_repo("issue88_expect_ack_nak_server.export")
- port = self._start_server(self._source_repo)
-
- run_git_or_fail(["push", self.url(port), "master"], cwd=self._client_repo.path)
- self.assertReposEqual(self._source_repo, self._client_repo)
-
-
-# TODO(dborowitz): Come up with a better way of testing various permutations of
-# capabilities. The only reason it is the way it is now is that side-band-64k
-# was only recently introduced into git-receive-pack.
-class NoSideBand64kReceivePackHandler(ReceivePackHandler):
- """ReceivePackHandler that does not support side-band-64k."""
-
- @classmethod
- def capabilities(cls):
- return [
- c
- for c in ReceivePackHandler.capabilities()
- if c != CAPABILITY_SIDE_BAND_64K
- ]
-
-
-def ignore_error(error):
- """Check whether this error is safe to ignore."""
- (e_type, e_value, e_tb) = error
- return issubclass(e_type, socket.error) and e_value[0] in (
- errno.ECONNRESET,
- errno.EPIPE,
- )
blob - cdc7dd9934035eb6c1609920e52790b253ef1efe (mode 644)
blob + /dev/null
--- dulwich/tests/compat/test_client.py
+++ /dev/null
-# test_client.py -- Compatibility tests for git client.
-# Copyright (C) 2010 Google, Inc.
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Compatibility tests between the Dulwich client and the cgit server."""
-
-import copy
-import http.server
-import os
-import select
-import signal
-import stat
-import subprocess
-import sys
-import tarfile
-import tempfile
-import threading
-from contextlib import suppress
-from io import BytesIO
-from urllib.parse import unquote
-
-from dulwich import client, file, index, objects, protocol, repo
-from dulwich.tests import SkipTest, expectedFailure
-
-from .utils import (
- _DEFAULT_GIT,
- CompatTestCase,
- check_for_daemon,
- import_repo_to_dir,
- rmtree_ro,
- run_git_or_fail,
-)
-
-if sys.platform == "win32":
- import ctypes
-
-
-class DulwichClientTestBase:
- """Tests for client/server compatibility."""
-
- def setUp(self):
- self.gitroot = os.path.dirname(
- import_repo_to_dir("server_new.export").rstrip(os.sep)
- )
- self.dest = os.path.join(self.gitroot, "dest")
- file.ensure_dir_exists(self.dest)
- run_git_or_fail(["init", "--quiet", "--bare"], cwd=self.dest)
-
- def tearDown(self):
- rmtree_ro(self.gitroot)
-
- def assertDestEqualsSrc(self):
- repo_dir = os.path.join(self.gitroot, "server_new.export")
- dest_repo_dir = os.path.join(self.gitroot, "dest")
- with repo.Repo(repo_dir) as src:
- with repo.Repo(dest_repo_dir) as dest:
- self.assertReposEqual(src, dest)
-
- def _client(self):
- raise NotImplementedError
-
- def _build_path(self):
- raise NotImplementedError
-
- def _do_send_pack(self):
- c = self._client()
- srcpath = os.path.join(self.gitroot, "server_new.export")
- with repo.Repo(srcpath) as src:
- sendrefs = dict(src.get_refs())
- del sendrefs[b"HEAD"]
- c.send_pack(
- self._build_path("/dest"),
- lambda _: sendrefs,
- src.generate_pack_data,
- )
-
- def test_send_pack(self):
- self._do_send_pack()
- self.assertDestEqualsSrc()
-
- def test_send_pack_nothing_to_send(self):
- self._do_send_pack()
- self.assertDestEqualsSrc()
- # nothing to send, but shouldn't raise either.
- self._do_send_pack()
-
- @staticmethod
- def _add_file(repo, tree_id, filename, contents):
- tree = repo[tree_id]
- blob = objects.Blob()
- blob.data = contents.encode("utf-8")
- repo.object_store.add_object(blob)
- tree.add(filename.encode("utf-8"), stat.S_IFREG | 0o644, blob.id)
- repo.object_store.add_object(tree)
- return tree.id
-
- def test_send_pack_from_shallow_clone(self):
- c = self._client()
- server_new_path = os.path.join(self.gitroot, "server_new.export")
- run_git_or_fail(["config", "http.uploadpack", "true"], cwd=server_new_path)
- run_git_or_fail(["config", "http.receivepack", "true"], cwd=server_new_path)
- remote_path = self._build_path("/server_new.export")
- with repo.Repo(self.dest) as local:
- result = c.fetch(remote_path, local, depth=1)
- for r in result.refs.items():
- local.refs.set_if_equals(r[0], None, r[1])
- tree_id = local[local.head()].tree
- for filename, contents in [
- ("bar", "bar contents"),
- ("zop", "zop contents"),
- ]:
- tree_id = self._add_file(local, tree_id, filename, contents)
- commit_id = local.do_commit(
- message=b"add " + filename.encode("utf-8"),
- committer=b"Joe Example <joe@example.com>",
- tree=tree_id,
- )
- sendrefs = dict(local.get_refs())
- del sendrefs[b"HEAD"]
- c.send_pack(remote_path, lambda _: sendrefs, local.generate_pack_data)
- with repo.Repo(server_new_path) as remote:
- self.assertEqual(remote.head(), commit_id)
-
- def test_send_without_report_status(self):
- c = self._client()
- c._send_capabilities.remove(b"report-status")
- srcpath = os.path.join(self.gitroot, "server_new.export")
- with repo.Repo(srcpath) as src:
- sendrefs = dict(src.get_refs())
- del sendrefs[b"HEAD"]
- c.send_pack(
- self._build_path("/dest"),
- lambda _: sendrefs,
- src.generate_pack_data,
- )
- self.assertDestEqualsSrc()
-
- def make_dummy_commit(self, dest):
- b = objects.Blob.from_string(b"hi")
- dest.object_store.add_object(b)
- t = index.commit_tree(dest.object_store, [(b"hi", b.id, 0o100644)])
- c = objects.Commit()
- c.author = c.committer = b"Foo Bar <foo@example.com>"
- c.author_time = c.commit_time = 0
- c.author_timezone = c.commit_timezone = 0
- c.message = b"hi"
- c.tree = t
- dest.object_store.add_object(c)
- return c.id
-
- def disable_ff_and_make_dummy_commit(self):
- # disable non-fast-forward pushes to the server
- dest = repo.Repo(os.path.join(self.gitroot, "dest"))
- run_git_or_fail(
- ["config", "receive.denyNonFastForwards", "true"], cwd=dest.path
- )
- commit_id = self.make_dummy_commit(dest)
- return dest, commit_id
-
- def compute_send(self, src):
- sendrefs = dict(src.get_refs())
- del sendrefs[b"HEAD"]
- return sendrefs, src.generate_pack_data
-
- def test_send_pack_one_error(self):
- dest, dummy_commit = self.disable_ff_and_make_dummy_commit()
- dest.refs[b"refs/heads/master"] = dummy_commit
- repo_dir = os.path.join(self.gitroot, "server_new.export")
- with repo.Repo(repo_dir) as src:
- sendrefs, gen_pack = self.compute_send(src)
- c = self._client()
- result = c.send_pack(
- self._build_path("/dest"), lambda _: sendrefs, gen_pack
- )
- self.assertEqual(
- {
- b"refs/heads/branch": None,
- b"refs/heads/master": "non-fast-forward",
- },
- result.ref_status,
- )
-
- def test_send_pack_multiple_errors(self):
- dest, dummy = self.disable_ff_and_make_dummy_commit()
- # set up for two non-ff errors
- branch, master = b"refs/heads/branch", b"refs/heads/master"
- dest.refs[branch] = dest.refs[master] = dummy
- repo_dir = os.path.join(self.gitroot, "server_new.export")
- with repo.Repo(repo_dir) as src:
- sendrefs, gen_pack = self.compute_send(src)
- c = self._client()
- result = c.send_pack(
- self._build_path("/dest"), lambda _: sendrefs, gen_pack
- )
- self.assertEqual(
- {branch: "non-fast-forward", master: "non-fast-forward"},
- result.ref_status,
- )
-
- def test_archive(self):
- c = self._client()
- f = BytesIO()
- c.archive(self._build_path("/server_new.export"), b"HEAD", f.write)
- f.seek(0)
- tf = tarfile.open(fileobj=f)
- self.assertEqual(["baz", "foo"], tf.getnames())
-
- def test_fetch_pack(self):
- c = self._client()
- with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
- result = c.fetch(self._build_path("/server_new.export"), dest)
- for r in result.refs.items():
- dest.refs.set_if_equals(r[0], None, r[1])
- self.assertDestEqualsSrc()
-
- def test_fetch_pack_depth(self):
- c = self._client()
- with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
- result = c.fetch(self._build_path("/server_new.export"), dest, depth=1)
- for r in result.refs.items():
- dest.refs.set_if_equals(r[0], None, r[1])
- self.assertEqual(
- dest.get_shallow(),
- {
- b"35e0b59e187dd72a0af294aedffc213eaa4d03ff",
- b"514dc6d3fbfe77361bcaef320c4d21b72bc10be9",
- },
- )
-
- def test_repeat(self):
- c = self._client()
- with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
- result = c.fetch(self._build_path("/server_new.export"), dest)
- for r in result.refs.items():
- dest.refs.set_if_equals(r[0], None, r[1])
- self.assertDestEqualsSrc()
- result = c.fetch(self._build_path("/server_new.export"), dest)
- for r in result.refs.items():
- dest.refs.set_if_equals(r[0], None, r[1])
- self.assertDestEqualsSrc()
-
- def test_fetch_empty_pack(self):
- c = self._client()
- with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
- result = c.fetch(self._build_path("/server_new.export"), dest)
- for r in result.refs.items():
- dest.refs.set_if_equals(r[0], None, r[1])
- self.assertDestEqualsSrc()
-
- def dw(refs, **kwargs):
- return list(refs.values())
-
- result = c.fetch(
- self._build_path("/server_new.export"),
- dest,
- determine_wants=dw,
- )
- for r in result.refs.items():
- dest.refs.set_if_equals(r[0], None, r[1])
- self.assertDestEqualsSrc()
-
- def test_incremental_fetch_pack(self):
- self.test_fetch_pack()
- dest, dummy = self.disable_ff_and_make_dummy_commit()
- dest.refs[b"refs/heads/master"] = dummy
- c = self._client()
- repo_dir = os.path.join(self.gitroot, "server_new.export")
- with repo.Repo(repo_dir) as dest:
- result = c.fetch(self._build_path("/dest"), dest)
- for r in result.refs.items():
- dest.refs.set_if_equals(r[0], None, r[1])
- self.assertDestEqualsSrc()
-
- def test_fetch_pack_no_side_band_64k(self):
- c = self._client()
- c._fetch_capabilities.remove(b"side-band-64k")
- with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
- result = c.fetch(self._build_path("/server_new.export"), dest)
- for r in result.refs.items():
- dest.refs.set_if_equals(r[0], None, r[1])
- self.assertDestEqualsSrc()
-
- def test_fetch_pack_zero_sha(self):
- # zero sha1s are already present on the client, and should
- # be ignored
- c = self._client()
- with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
- result = c.fetch(
- self._build_path("/server_new.export"),
- dest,
- lambda refs, **kwargs: [protocol.ZERO_SHA],
- )
- for r in result.refs.items():
- dest.refs.set_if_equals(r[0], None, r[1])
-
- def test_send_remove_branch(self):
- with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
- dummy_commit = self.make_dummy_commit(dest)
- dest.refs[b"refs/heads/master"] = dummy_commit
- dest.refs[b"refs/heads/abranch"] = dummy_commit
- sendrefs = dict(dest.refs)
- sendrefs[b"refs/heads/abranch"] = b"00" * 20
- del sendrefs[b"HEAD"]
-
- def gen_pack(have, want, ofs_delta=False, progress=None):
- return 0, []
-
- c = self._client()
- self.assertEqual(dest.refs[b"refs/heads/abranch"], dummy_commit)
- c.send_pack(self._build_path("/dest"), lambda _: sendrefs, gen_pack)
- self.assertNotIn(b"refs/heads/abranch", dest.refs)
-
- def test_send_new_branch_empty_pack(self):
- with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
- dummy_commit = self.make_dummy_commit(dest)
- dest.refs[b"refs/heads/master"] = dummy_commit
- dest.refs[b"refs/heads/abranch"] = dummy_commit
- sendrefs = {b"refs/heads/bbranch": dummy_commit}
-
- def gen_pack(have, want, ofs_delta=False, progress=None):
- return 0, []
-
- c = self._client()
- self.assertEqual(dest.refs[b"refs/heads/abranch"], dummy_commit)
- c.send_pack(self._build_path("/dest"), lambda _: sendrefs, gen_pack)
- self.assertEqual(dummy_commit, dest.refs[b"refs/heads/abranch"])
-
- def test_get_refs(self):
- c = self._client()
- refs = c.get_refs(self._build_path("/server_new.export"))
-
- repo_dir = os.path.join(self.gitroot, "server_new.export")
- with repo.Repo(repo_dir) as dest:
- self.assertDictEqual(dest.refs.as_dict(), refs)
-
-
-class DulwichTCPClientTest(CompatTestCase, DulwichClientTestBase):
- def setUp(self):
- CompatTestCase.setUp(self)
- DulwichClientTestBase.setUp(self)
- if check_for_daemon(limit=1):
- raise SkipTest(
- "git-daemon was already running on port %s" % protocol.TCP_GIT_PORT
- )
- fd, self.pidfile = tempfile.mkstemp(
- prefix="dulwich-test-git-client", suffix=".pid"
- )
- os.fdopen(fd).close()
- args = [
- _DEFAULT_GIT,
- "daemon",
- "--verbose",
- "--export-all",
- "--pid-file=%s" % self.pidfile,
- "--base-path=%s" % self.gitroot,
- "--enable=receive-pack",
- "--enable=upload-archive",
- "--listen=localhost",
- "--reuseaddr",
- self.gitroot,
- ]
- self.process = subprocess.Popen(
- args,
- cwd=self.gitroot,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- )
- if not check_for_daemon():
- raise SkipTest("git-daemon failed to start")
-
- def tearDown(self):
- with open(self.pidfile) as f:
- pid = int(f.read().strip())
- if sys.platform == "win32":
- PROCESS_TERMINATE = 1
- handle = ctypes.windll.kernel32.OpenProcess(PROCESS_TERMINATE, False, pid)
- ctypes.windll.kernel32.TerminateProcess(handle, -1)
- ctypes.windll.kernel32.CloseHandle(handle)
- else:
- with suppress(OSError):
- os.kill(pid, signal.SIGKILL)
- os.unlink(self.pidfile)
- self.process.wait()
- self.process.stdout.close()
- self.process.stderr.close()
- DulwichClientTestBase.tearDown(self)
- CompatTestCase.tearDown(self)
-
- def _client(self):
- return client.TCPGitClient("localhost")
-
- def _build_path(self, path):
- return path
-
- if sys.platform == "win32":
-
- @expectedFailure
- def test_fetch_pack_no_side_band_64k(self):
- DulwichClientTestBase.test_fetch_pack_no_side_band_64k(self)
-
- def test_send_remove_branch(self):
- # This test fails intermittently on my machine, probably due to some sort
- # of race condition. Probably also related to #1015
- self.skipTest("skip flaky test; see #1015")
-
-
-class TestSSHVendor:
- @staticmethod
- def run_command(
- host,
- command,
- username=None,
- port=None,
- password=None,
- key_filename=None,
- ):
- cmd, path = command.split(" ")
- cmd = cmd.split("-", 1)
- path = path.replace("'", "")
- p = subprocess.Popen(
- [*cmd, path],
- bufsize=0,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- )
- return client.SubprocessWrapper(p)
-
-
-class DulwichMockSSHClientTest(CompatTestCase, DulwichClientTestBase):
- def setUp(self):
- CompatTestCase.setUp(self)
- DulwichClientTestBase.setUp(self)
- self.real_vendor = client.get_ssh_vendor
- client.get_ssh_vendor = TestSSHVendor
-
- def tearDown(self):
- DulwichClientTestBase.tearDown(self)
- CompatTestCase.tearDown(self)
- client.get_ssh_vendor = self.real_vendor
-
- def _client(self):
- return client.SSHGitClient("localhost")
-
- def _build_path(self, path):
- return self.gitroot + path
-
-
-class DulwichSubprocessClientTest(CompatTestCase, DulwichClientTestBase):
- def setUp(self):
- CompatTestCase.setUp(self)
- DulwichClientTestBase.setUp(self)
-
- def tearDown(self):
- DulwichClientTestBase.tearDown(self)
- CompatTestCase.tearDown(self)
-
- def _client(self):
- return client.SubprocessGitClient()
-
- def _build_path(self, path):
- return self.gitroot + path
-
-
-class GitHTTPRequestHandler(http.server.SimpleHTTPRequestHandler):
- """HTTP Request handler that calls out to 'git http-backend'."""
-
- # Make rfile unbuffered -- we need to read one line and then pass
- # the rest to a subprocess, so we can't use buffered input.
- rbufsize = 0
-
- def do_POST(self):
- self.run_backend()
-
- def do_GET(self):
- self.run_backend()
-
- def send_head(self):
- return self.run_backend()
-
- def log_request(self, code="-", size="-"):
- # Let's be quiet, the test suite is noisy enough already
- pass
-
- def run_backend(self):
- """Call out to git http-backend."""
- # Based on CGIHTTPServer.CGIHTTPRequestHandler.run_cgi:
- # Copyright (c) 2001-2010 Python Software Foundation;
- # All Rights Reserved
- # Licensed under the Python Software Foundation License.
- rest = self.path
- # find an explicit query string, if present.
- i = rest.rfind("?")
- if i >= 0:
- rest, query = rest[:i], rest[i + 1 :]
- else:
- query = ""
-
- env = copy.deepcopy(os.environ)
- env["SERVER_SOFTWARE"] = self.version_string()
- env["SERVER_NAME"] = self.server.server_name
- env["GATEWAY_INTERFACE"] = "CGI/1.1"
- env["SERVER_PROTOCOL"] = self.protocol_version
- env["SERVER_PORT"] = str(self.server.server_port)
- env["GIT_PROJECT_ROOT"] = self.server.root_path
- env["GIT_HTTP_EXPORT_ALL"] = "1"
- env["REQUEST_METHOD"] = self.command
- uqrest = unquote(rest)
- env["PATH_INFO"] = uqrest
- env["SCRIPT_NAME"] = "/"
- if query:
- env["QUERY_STRING"] = query
- host = self.address_string()
- if host != self.client_address[0]:
- env["REMOTE_HOST"] = host
- env["REMOTE_ADDR"] = self.client_address[0]
- authorization = self.headers.get("authorization")
- if authorization:
- authorization = authorization.split()
- if len(authorization) == 2:
- import base64
- import binascii
-
- env["AUTH_TYPE"] = authorization[0]
- if authorization[0].lower() == "basic":
- try:
- authorization = base64.decodestring(authorization[1])
- except binascii.Error:
- pass
- else:
- authorization = authorization.split(":")
- if len(authorization) == 2:
- env["REMOTE_USER"] = authorization[0]
- # XXX REMOTE_IDENT
- content_type = self.headers.get("content-type")
- if content_type:
- env["CONTENT_TYPE"] = content_type
- length = self.headers.get("content-length")
- if length:
- env["CONTENT_LENGTH"] = length
- referer = self.headers.get("referer")
- if referer:
- env["HTTP_REFERER"] = referer
- accept = []
- for line in self.headers.getallmatchingheaders("accept"):
- if line[:1] in "\t\n\r ":
- accept.append(line.strip())
- else:
- accept = accept + line[7:].split(",")
- env["HTTP_ACCEPT"] = ",".join(accept)
- ua = self.headers.get("user-agent")
- if ua:
- env["HTTP_USER_AGENT"] = ua
- co = self.headers.get("cookie")
- if co:
- env["HTTP_COOKIE"] = co
- # XXX Other HTTP_* headers
- # Since we're setting the env in the parent, provide empty
- # values to override previously set values
- for k in (
- "QUERY_STRING",
- "REMOTE_HOST",
- "CONTENT_LENGTH",
- "HTTP_USER_AGENT",
- "HTTP_COOKIE",
- "HTTP_REFERER",
- ):
- env.setdefault(k, "")
-
- self.wfile.write(b"HTTP/1.1 200 Script output follows\r\n")
- self.wfile.write(("Server: %s\r\n" % self.server.server_name).encode("ascii"))
- self.wfile.write(("Date: %s\r\n" % self.date_time_string()).encode("ascii"))
-
- decoded_query = query.replace("+", " ")
-
- try:
- nbytes = int(length)
- except (TypeError, ValueError):
- nbytes = -1
- if self.command.lower() == "post":
- if nbytes > 0:
- data = self.rfile.read(nbytes)
- elif self.headers.get("transfer-encoding") == "chunked":
- chunks = []
- while True:
- line = self.rfile.readline()
- length = int(line.rstrip(), 16)
- chunk = self.rfile.read(length + 2)
- chunks.append(chunk[:-2])
- if length == 0:
- break
- data = b"".join(chunks)
- env["CONTENT_LENGTH"] = str(len(data))
- else:
- raise AssertionError
- else:
- data = None
- env["CONTENT_LENGTH"] = "0"
- # throw away additional data [see bug #427345]
- while select.select([self.rfile._sock], [], [], 0)[0]:
- if not self.rfile._sock.recv(1):
- break
- args = ["http-backend"]
- if "=" not in decoded_query:
- args.append(decoded_query)
- stdout = run_git_or_fail(args, input=data, env=env, stderr=subprocess.PIPE)
- self.wfile.write(stdout)
-
-
-class HTTPGitServer(http.server.HTTPServer):
- allow_reuse_address = True
-
- def __init__(self, server_address, root_path) -> None:
- http.server.HTTPServer.__init__(self, server_address, GitHTTPRequestHandler)
- self.root_path = root_path
- self.server_name = "localhost"
-
- def get_url(self):
- return f"http://{self.server_name}:{self.server_port}/"
-
-
-class DulwichHttpClientTest(CompatTestCase, DulwichClientTestBase):
- min_git_version = (1, 7, 0, 2)
-
- def setUp(self):
- CompatTestCase.setUp(self)
- DulwichClientTestBase.setUp(self)
- self._httpd = HTTPGitServer(("localhost", 0), self.gitroot)
- self.addCleanup(self._httpd.shutdown)
- threading.Thread(target=self._httpd.serve_forever).start()
- run_git_or_fail(["config", "http.uploadpack", "true"], cwd=self.dest)
- run_git_or_fail(["config", "http.receivepack", "true"], cwd=self.dest)
-
- def tearDown(self):
- DulwichClientTestBase.tearDown(self)
- CompatTestCase.tearDown(self)
- self._httpd.shutdown()
- self._httpd.socket.close()
-
- def _client(self):
- return client.HttpGitClient(self._httpd.get_url())
-
- def _build_path(self, path):
- return path
-
- def test_archive(self):
- raise SkipTest("exporting archives not supported over http")
blob - 351351a5daff60b1370ab5c8b752c15a7560e13c (mode 644)
blob + /dev/null
--- dulwich/tests/compat/test_pack.py
+++ /dev/null
-# test_pack.py -- Compatibility tests for git packs.
-# Copyright (C) 2010 Google, Inc.
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Compatibility tests for git packs."""
-
-import binascii
-import os
-import re
-import shutil
-import tempfile
-
-from dulwich.tests import SkipTest
-
-from ...objects import Blob
-from ...pack import write_pack
-from ..test_pack import PackTests, a_sha, pack1_sha
-from .utils import require_git_version, run_git_or_fail
-
-_NON_DELTA_RE = re.compile(b"non delta: (?P<non_delta>\\d+) objects")
-
-
-def _git_verify_pack_object_list(output):
- pack_shas = set()
- for line in output.splitlines():
- sha = line[:40]
- try:
- binascii.unhexlify(sha)
- except (TypeError, binascii.Error):
- continue # non-sha line
- pack_shas.add(sha)
- return pack_shas
-
-
-class TestPack(PackTests):
- """Compatibility tests for reading and writing pack files."""
-
- def setUp(self):
- require_git_version((1, 5, 0))
- super().setUp()
- self._tempdir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self._tempdir)
-
- def test_copy(self):
- with self.get_pack(pack1_sha) as origpack:
- self.assertSucceeds(origpack.index.check)
- pack_path = os.path.join(self._tempdir, "Elch")
- write_pack(pack_path, origpack.pack_tuples())
- output = run_git_or_fail(["verify-pack", "-v", pack_path])
- orig_shas = {o.id for o in origpack.iterobjects()}
- self.assertEqual(orig_shas, _git_verify_pack_object_list(output))
-
- def test_deltas_work(self):
- with self.get_pack(pack1_sha) as orig_pack:
- orig_blob = orig_pack[a_sha]
- new_blob = Blob()
- new_blob.data = orig_blob.data + b"x"
- all_to_pack = [(o, None) for o in orig_pack.iterobjects()] + [
- (new_blob, None)
- ]
- pack_path = os.path.join(self._tempdir, "pack_with_deltas")
- write_pack(pack_path, all_to_pack, deltify=True)
- output = run_git_or_fail(["verify-pack", "-v", pack_path])
- self.assertEqual(
- {x[0].id for x in all_to_pack},
- _git_verify_pack_object_list(output),
- )
- # We specifically made a new blob that should be a delta
- # against the blob a_sha, so make sure we really got only 3
- # non-delta objects:
- got_non_delta = int(_NON_DELTA_RE.search(output).group("non_delta"))
- self.assertEqual(
- 3,
- got_non_delta,
- "Expected 3 non-delta objects, got %d" % got_non_delta,
- )
-
- def test_delta_medium_object(self):
- # This tests an object set that will have a copy operation
- # 2**20 in size.
- with self.get_pack(pack1_sha) as orig_pack:
- orig_blob = orig_pack[a_sha]
- new_blob = Blob()
- new_blob.data = orig_blob.data + (b"x" * 2**20)
- new_blob_2 = Blob()
- new_blob_2.data = new_blob.data + b"y"
- all_to_pack = [
- *list(orig_pack.pack_tuples()),
- (new_blob, None),
- (new_blob_2, None),
- ]
- pack_path = os.path.join(self._tempdir, "pack_with_deltas")
- write_pack(pack_path, all_to_pack, deltify=True)
- output = run_git_or_fail(["verify-pack", "-v", pack_path])
- self.assertEqual(
- {x[0].id for x in all_to_pack},
- _git_verify_pack_object_list(output),
- )
- # We specifically made a new blob that should be a delta
- # against the blob a_sha, so make sure we really got only 3
- # non-delta objects:
- got_non_delta = int(_NON_DELTA_RE.search(output).group("non_delta"))
- self.assertEqual(
- 3,
- got_non_delta,
- "Expected 3 non-delta objects, got %d" % got_non_delta,
- )
- # We expect one object to have a delta chain length of two
- # (new_blob_2), so let's verify that actually happens:
- self.assertIn(b"chain length = 2", output)
-
- # This test is SUPER slow: over 80 seconds on a 2012-era
- # laptop. This is because SequenceMatcher is worst-case quadratic
- # on the input size. It's impractical to produce deltas for
- # objects this large, but it's still worth doing the right thing
- # when it happens.
- def test_delta_large_object(self):
- # This tests an object set that will have a copy operation
- # 2**25 in size. This is a copy large enough that it requires
- # two copy operations in git's binary delta format.
- raise SkipTest("skipping slow, large test")
- with self.get_pack(pack1_sha) as orig_pack:
- new_blob = Blob()
- new_blob.data = "big blob" + ("x" * 2**25)
- new_blob_2 = Blob()
- new_blob_2.data = new_blob.data + "y"
- all_to_pack = [
- *list(orig_pack.pack_tuples()),
- (new_blob, None),
- (new_blob_2, None),
- ]
- pack_path = os.path.join(self._tempdir, "pack_with_deltas")
- write_pack(pack_path, all_to_pack, deltify=True)
- output = run_git_or_fail(["verify-pack", "-v", pack_path])
- self.assertEqual(
- {x[0].id for x in all_to_pack},
- _git_verify_pack_object_list(output),
- )
- # We specifically made a new blob that should be a delta
- # against the blob a_sha, so make sure we really got only 4
- # non-delta objects:
- got_non_delta = int(_NON_DELTA_RE.search(output).group("non_delta"))
- self.assertEqual(
- 4,
- got_non_delta,
- "Expected 4 non-delta objects, got %d" % got_non_delta,
- )
blob - aa33f0db73c32d34c612199b7b85a846afe9f804 (mode 644)
blob + /dev/null
--- dulwich/tests/compat/test_patch.py
+++ /dev/null
-# test_patch.py -- test patch compatibility with CGit
-# Copyright (C) 2019 Boris Feld <boris@comet.ml>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests related to patch compatibility with CGit."""
-
-import os
-import shutil
-import tempfile
-from io import BytesIO
-
-from dulwich import porcelain
-
-from ...repo import Repo
-from .utils import CompatTestCase, run_git_or_fail
-
-
-class CompatPatchTestCase(CompatTestCase):
- def setUp(self):
- super().setUp()
- self.test_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.test_dir)
- self.repo_path = os.path.join(self.test_dir, "repo")
- self.repo = Repo.init(self.repo_path, mkdir=True)
- self.addCleanup(self.repo.close)
-
- def test_patch_apply(self):
- # Prepare the repository
-
- # Create some files and commit them
- file_list = ["to_exists", "to_modify", "to_delete"]
- for file in file_list:
- file_path = os.path.join(self.repo_path, file)
-
- # Touch the files
- with open(file_path, "w"):
- pass
-
- self.repo.stage(file_list)
-
- first_commit = self.repo.do_commit(b"The first commit")
-
- # Make a copy of the repository so we can apply the diff later
- copy_path = os.path.join(self.test_dir, "copy")
- shutil.copytree(self.repo_path, copy_path)
-
- # Do some changes
- with open(os.path.join(self.repo_path, "to_modify"), "w") as f:
- f.write("Modified!")
-
- os.remove(os.path.join(self.repo_path, "to_delete"))
-
- with open(os.path.join(self.repo_path, "to_add"), "w"):
- pass
-
- self.repo.stage(["to_modify", "to_delete", "to_add"])
-
- second_commit = self.repo.do_commit(b"The second commit")
-
- # Get the patch
- first_tree = self.repo[first_commit].tree
- second_tree = self.repo[second_commit].tree
-
- outstream = BytesIO()
- porcelain.diff_tree(
- self.repo.path, first_tree, second_tree, outstream=outstream
- )
-
- # Save it on disk
- patch_path = os.path.join(self.test_dir, "patch.patch")
- with open(patch_path, "wb") as patch:
- patch.write(outstream.getvalue())
-
- # And try to apply it to the copy directory
- git_command = ["-C", copy_path, "apply", patch_path]
- run_git_or_fail(git_command)
-
- # And now check that the files contents are exactly the same between
- # the two repositories
- original_files = set(os.listdir(self.repo_path))
- new_files = set(os.listdir(copy_path))
-
- # Check that we have the exact same files in both repositories
- self.assertEqual(original_files, new_files)
-
- for file in original_files:
- if file == ".git":
- continue
-
- original_file_path = os.path.join(self.repo_path, file)
- copy_file_path = os.path.join(copy_path, file)
-
- self.assertTrue(os.path.isfile(copy_file_path))
-
- with open(original_file_path, "rb") as original_file:
- original_content = original_file.read()
-
- with open(copy_file_path, "rb") as copy_file:
- copy_content = copy_file.read()
-
- self.assertEqual(original_content, copy_content)
blob - 5f81e137166827f3e0c0019e93c17d4972c548c4 (mode 644)
blob + /dev/null
--- dulwich/tests/compat/test_porcelain.py
+++ /dev/null
-# test_porcelain .py -- Tests for dulwich.porcelain/CGit compatibility
-# Copyright (C) 2010 Google, Inc.
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Compatibility tests for dulwich.porcelain."""
-
-import os
-import platform
-import sys
-from unittest import skipIf
-
-from dulwich import porcelain
-
-from ..test_porcelain import PorcelainGpgTestCase
-from ..utils import build_commit_graph
-from .utils import CompatTestCase, run_git_or_fail
-
-
-@skipIf(
- platform.python_implementation() == "PyPy" or sys.platform == "win32",
- "gpgme not easily available or supported on Windows and PyPy",
-)
-class TagCreateSignTestCase(PorcelainGpgTestCase, CompatTestCase):
- def setUp(self):
- super().setUp()
-
- def test_sign(self):
- # Test that dulwich signatures can be verified by CGit
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
- cfg = self.repo.get_config()
- cfg.set(("user",), "signingKey", PorcelainGpgTestCase.DEFAULT_KEY_ID)
- self.import_default_key()
-
- porcelain.tag_create(
- self.repo.path,
- b"tryme",
- b"foo <foo@bar.com>",
- b"bar",
- annotated=True,
- sign=True,
- )
-
- run_git_or_fail(
- [f"--git-dir={self.repo.controldir()}", "tag", "-v", "tryme"],
- env={"GNUPGHOME": os.environ["GNUPGHOME"]},
- )
-
- def test_verify(self):
- # Test that CGit signatures can be verified by dulwich
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
- self.import_default_key()
-
- run_git_or_fail(
- [
- f"--git-dir={self.repo.controldir()}",
- "tag",
- "-u",
- PorcelainGpgTestCase.DEFAULT_KEY_ID,
- "-m",
- "foo",
- "verifyme",
- ],
- env={
- "GNUPGHOME": os.environ["GNUPGHOME"],
- "GIT_COMMITTER_NAME": "Joe Example",
- "GIT_COMMITTER_EMAIL": "joe@example.com",
- },
- )
- tag = self.repo[b"refs/tags/verifyme"]
- self.assertNotEqual(tag.signature, None)
- tag.verify()
blob - 3ab51a67ec7be9b6796862f5353c6fb9ef7d183b (mode 644)
blob + /dev/null
--- dulwich/tests/compat/test_repository.py
+++ /dev/null
-# test_repo.py -- Git repo compatibility tests
-# Copyright (C) 2010 Google, Inc.
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Compatibility tests for dulwich repositories."""
-
-import os
-import tempfile
-from io import BytesIO
-from itertools import chain
-
-from ...objects import hex_to_sha
-from ...repo import Repo, check_ref_format
-from .utils import CompatTestCase, require_git_version, rmtree_ro, run_git_or_fail
-
-
-class ObjectStoreTestCase(CompatTestCase):
- """Tests for git repository compatibility."""
-
- def setUp(self):
- super().setUp()
- self._repo = self.import_repo("server_new.export")
-
- def _run_git(self, args):
- return run_git_or_fail(args, cwd=self._repo.path)
-
- def _parse_refs(self, output):
- refs = {}
- for line in BytesIO(output):
- fields = line.rstrip(b"\n").split(b" ")
- self.assertEqual(3, len(fields))
- refname, type_name, sha = fields
- check_ref_format(refname[5:])
- hex_to_sha(sha)
- refs[refname] = (type_name, sha)
- return refs
-
- def _parse_objects(self, output):
- return {s.rstrip(b"\n").split(b" ")[0] for s in BytesIO(output)}
-
- def test_bare(self):
- self.assertTrue(self._repo.bare)
- self.assertFalse(os.path.exists(os.path.join(self._repo.path, ".git")))
-
- def test_head(self):
- output = self._run_git(["rev-parse", "HEAD"])
- head_sha = output.rstrip(b"\n")
- hex_to_sha(head_sha)
- self.assertEqual(head_sha, self._repo.refs[b"HEAD"])
-
- def test_refs(self):
- output = self._run_git(
- ["for-each-ref", "--format=%(refname) %(objecttype) %(objectname)"]
- )
- expected_refs = self._parse_refs(output)
-
- actual_refs = {}
- for refname, sha in self._repo.refs.as_dict().items():
- if refname == b"HEAD":
- continue # handled in test_head
- obj = self._repo[sha]
- self.assertEqual(sha, obj.id)
- actual_refs[refname] = (obj.type_name, obj.id)
- self.assertEqual(expected_refs, actual_refs)
-
- # TODO(dborowitz): peeled ref tests
-
- def _get_loose_shas(self):
- output = self._run_git(["rev-list", "--all", "--objects", "--unpacked"])
- return self._parse_objects(output)
-
- def _get_all_shas(self):
- output = self._run_git(["rev-list", "--all", "--objects"])
- return self._parse_objects(output)
-
- def assertShasMatch(self, expected_shas, actual_shas_iter):
- actual_shas = set()
- for sha in actual_shas_iter:
- obj = self._repo[sha]
- self.assertEqual(sha, obj.id)
- actual_shas.add(sha)
- self.assertEqual(expected_shas, actual_shas)
-
- def test_loose_objects(self):
- # TODO(dborowitz): This is currently not very useful since
- # fast-imported repos only contained packed objects.
- expected_shas = self._get_loose_shas()
- self.assertShasMatch(
- expected_shas, self._repo.object_store._iter_loose_objects()
- )
-
- def test_packed_objects(self):
- expected_shas = self._get_all_shas() - self._get_loose_shas()
- self.assertShasMatch(
- expected_shas, chain.from_iterable(self._repo.object_store.packs)
- )
-
- def test_all_objects(self):
- expected_shas = self._get_all_shas()
- self.assertShasMatch(expected_shas, iter(self._repo.object_store))
-
-
-class WorkingTreeTestCase(ObjectStoreTestCase):
- """Test for compatibility with git-worktree."""
-
- min_git_version = (2, 5, 0)
-
- def create_new_worktree(self, repo_dir, branch):
- """Create a new worktree using git-worktree.
-
- Args:
- repo_dir: The directory of the main working tree.
- branch: The branch or commit to checkout in the new worktree.
-
- Returns: The path to the new working tree.
- """
- temp_dir = tempfile.mkdtemp()
- run_git_or_fail(["worktree", "add", temp_dir, branch], cwd=repo_dir)
- self.addCleanup(rmtree_ro, temp_dir)
- return temp_dir
-
- def setUp(self):
- super().setUp()
- self._worktree_path = self.create_new_worktree(self._repo.path, "branch")
- self._worktree_repo = Repo(self._worktree_path)
- self.addCleanup(self._worktree_repo.close)
- self._mainworktree_repo = self._repo
- self._number_of_working_tree = 2
- self._repo = self._worktree_repo
-
- def test_refs(self):
- super().test_refs()
- self.assertEqual(
- self._mainworktree_repo.refs.allkeys(), self._repo.refs.allkeys()
- )
-
- def test_head_equality(self):
- self.assertNotEqual(
- self._repo.refs[b"HEAD"], self._mainworktree_repo.refs[b"HEAD"]
- )
-
- def test_bare(self):
- self.assertFalse(self._repo.bare)
- self.assertTrue(os.path.isfile(os.path.join(self._repo.path, ".git")))
-
- def _parse_worktree_list(self, output):
- worktrees = []
- for line in BytesIO(output):
- fields = line.rstrip(b"\n").split()
- worktrees.append(tuple(f.decode() for f in fields))
- return worktrees
-
- def test_git_worktree_list(self):
- # 'git worktree list' was introduced in 2.7.0
- require_git_version((2, 7, 0))
- output = run_git_or_fail(["worktree", "list"], cwd=self._repo.path)
- worktrees = self._parse_worktree_list(output)
- self.assertEqual(len(worktrees), self._number_of_working_tree)
- self.assertEqual(worktrees[0][1], "(bare)")
- self.assertTrue(os.path.samefile(worktrees[0][0], self._mainworktree_repo.path))
-
- output = run_git_or_fail(["worktree", "list"], cwd=self._mainworktree_repo.path)
- worktrees = self._parse_worktree_list(output)
- self.assertEqual(len(worktrees), self._number_of_working_tree)
- self.assertEqual(worktrees[0][1], "(bare)")
- self.assertTrue(os.path.samefile(worktrees[0][0], self._mainworktree_repo.path))
-
- def test_git_worktree_config(self):
- """Test that git worktree config parsing matches the git CLI's behavior."""
- # Set some config value in the main repo using the git CLI
- require_git_version((2, 7, 0))
- test_name = "Jelmer"
- test_email = "jelmer@apache.org"
- run_git_or_fail(["config", "user.name", test_name], cwd=self._repo.path)
- run_git_or_fail(["config", "user.email", test_email], cwd=self._repo.path)
-
- worktree_cfg = self._worktree_repo.get_config()
- main_cfg = self._repo.get_config()
-
- # Assert that both the worktree repo and main repo have the same view of the config,
- # and that the config matches what we set with the git cli
- self.assertEqual(worktree_cfg, main_cfg)
- for c in [worktree_cfg, main_cfg]:
- self.assertEqual(test_name.encode(), c.get((b"user",), b"name"))
- self.assertEqual(test_email.encode(), c.get((b"user",), b"email"))
-
- # Read the config values in the worktree with the git cli and assert they match
- # the dulwich-parsed configs
- output_name = (
- run_git_or_fail(["config", "user.name"], cwd=self._mainworktree_repo.path)
- .decode()
- .rstrip("\n")
- )
- output_email = (
- run_git_or_fail(["config", "user.email"], cwd=self._mainworktree_repo.path)
- .decode()
- .rstrip("\n")
- )
- self.assertEqual(test_name, output_name)
- self.assertEqual(test_email, output_email)
-
-
-class InitNewWorkingDirectoryTestCase(WorkingTreeTestCase):
- """Test compatibility of Repo.init_new_working_directory."""
-
- min_git_version = (2, 5, 0)
-
- def setUp(self):
- super().setUp()
- self._other_worktree = self._repo
- worktree_repo_path = tempfile.mkdtemp()
- self.addCleanup(rmtree_ro, worktree_repo_path)
- self._repo = Repo._init_new_working_directory(
- worktree_repo_path, self._mainworktree_repo
- )
- self.addCleanup(self._repo.close)
- self._number_of_working_tree = 3
-
- def test_head_equality(self):
- self.assertEqual(
- self._repo.refs[b"HEAD"], self._mainworktree_repo.refs[b"HEAD"]
- )
-
- def test_bare(self):
- self.assertFalse(self._repo.bare)
- self.assertTrue(os.path.isfile(os.path.join(self._repo.path, ".git")))
blob - 238fb39342f76893e9fddcd2cd3d2bd14f625e92 (mode 644)
blob + /dev/null
--- dulwich/tests/compat/test_server.py
+++ /dev/null
-# test_server.py -- Compatibility tests for git server.
-# Copyright (C) 2010 Google, Inc.
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Compatibility tests between Dulwich and the cgit server.
-
-Warning: these tests should be fairly stable, but when writing/debugging new
- tests, deadlocks may freeze the test process such that it cannot be
- Ctrl-C'ed. On POSIX systems, you can kill the tests with Ctrl-Z, "kill %".
-"""
-
-import os
-import sys
-import threading
-
-from dulwich.tests import skipIf
-
-from ...server import DictBackend, TCPGitServer
-from .server_utils import NoSideBand64kReceivePackHandler, ServerTests
-from .utils import CompatTestCase, require_git_version
-
-
-@skipIf(sys.platform == "win32", "Broken on windows, with very long fail time.")
-class GitServerTestCase(ServerTests, CompatTestCase):
- """Tests for client/server compatibility.
-
- This server test case does not use side-band-64k in git-receive-pack.
- """
-
- protocol = "git"
-
- def _handlers(self):
- return {b"git-receive-pack": NoSideBand64kReceivePackHandler}
-
- def _check_server(self, dul_server):
- receive_pack_handler_cls = dul_server.handlers[b"git-receive-pack"]
- caps = receive_pack_handler_cls.capabilities()
- self.assertNotIn(b"side-band-64k", caps)
-
- def _start_server(self, repo):
- backend = DictBackend({b"/": repo})
- dul_server = TCPGitServer(backend, b"localhost", 0, handlers=self._handlers())
- self._check_server(dul_server)
- self.addCleanup(dul_server.shutdown)
- self.addCleanup(dul_server.server_close)
- threading.Thread(target=dul_server.serve).start()
- self._server = dul_server
- _, port = self._server.socket.getsockname()
- return port
-
-
-@skipIf(sys.platform == "win32", "Broken on windows, with very long fail time.")
-class GitServerSideBand64kTestCase(GitServerTestCase):
- """Tests for client/server compatibility with side-band-64k support."""
-
- # side-band-64k in git-receive-pack was introduced in git 1.7.0.2
- min_git_version = (1, 7, 0, 2)
-
- def setUp(self):
- super().setUp()
- # side-band-64k is broken in the windows client.
- # https://github.com/msysgit/git/issues/101
- # Fix has landed for the 1.9.3 release.
- if os.name == "nt":
- require_git_version((1, 9, 3))
-
- def _handlers(self):
- return None # default handlers include side-band-64k
-
- def _check_server(self, server):
- receive_pack_handler_cls = server.handlers[b"git-receive-pack"]
- caps = receive_pack_handler_cls.capabilities()
- self.assertIn(b"side-band-64k", caps)
blob - 65afe5a36f31a56acb031dc859a0b33d987d3557 (mode 644)
blob + /dev/null
--- dulwich/tests/compat/test_utils.py
+++ /dev/null
-# test_utils.py -- Tests for git compatibility utilities
-# Copyright (C) 2010 Google, Inc.
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for git compatibility utilities."""
-
-from dulwich.tests import SkipTest, TestCase
-from dulwich.tests.compat import utils
-
-
-class GitVersionTests(TestCase):
- def setUp(self):
- super().setUp()
- self._orig_run_git = utils.run_git
- self._version_str = None # tests can override to set stub version
-
- def run_git(args, **unused_kwargs):
- self.assertEqual(["--version"], args)
- return 0, self._version_str, ""
-
- utils.run_git = run_git
-
- def tearDown(self):
- super().tearDown()
- utils.run_git = self._orig_run_git
-
- def test_git_version_none(self):
- self._version_str = b"not a git version"
- self.assertEqual(None, utils.git_version())
-
- def test_git_version_3(self):
- self._version_str = b"git version 1.6.6"
- self.assertEqual((1, 6, 6, 0), utils.git_version())
-
- def test_git_version_4(self):
- self._version_str = b"git version 1.7.0.2"
- self.assertEqual((1, 7, 0, 2), utils.git_version())
-
- def test_git_version_extra(self):
- self._version_str = b"git version 1.7.0.3.295.gd8fa2"
- self.assertEqual((1, 7, 0, 3), utils.git_version())
-
- def assertRequireSucceeds(self, required_version):
- try:
- utils.require_git_version(required_version)
- except SkipTest:
- self.fail()
-
- def assertRequireFails(self, required_version):
- self.assertRaises(SkipTest, utils.require_git_version, required_version)
-
- def test_require_git_version(self):
- try:
- self._version_str = b"git version 1.6.6"
- self.assertRequireSucceeds((1, 6, 6))
- self.assertRequireSucceeds((1, 6, 6, 0))
- self.assertRequireSucceeds((1, 6, 5))
- self.assertRequireSucceeds((1, 6, 5, 99))
- self.assertRequireFails((1, 7, 0))
- self.assertRequireFails((1, 7, 0, 2))
- self.assertRaises(ValueError, utils.require_git_version, (1, 6, 6, 0, 0))
-
- self._version_str = b"git version 1.7.0.2"
- self.assertRequireSucceeds((1, 6, 6))
- self.assertRequireSucceeds((1, 6, 6, 0))
- self.assertRequireSucceeds((1, 7, 0))
- self.assertRequireSucceeds((1, 7, 0, 2))
- self.assertRequireFails((1, 7, 0, 3))
- self.assertRequireFails((1, 7, 1))
- except SkipTest as e:
- # This test is designed to catch all SkipTest exceptions.
- self.fail("Test unexpectedly skipped: %s" % e)
blob - ff53f642bcabf5c73d09684b5686dfa7fb3f4745 (mode 644)
blob + /dev/null
--- dulwich/tests/compat/test_web.py
+++ /dev/null
-# test_web.py -- Compatibility tests for the git web server.
-# Copyright (C) 2010 Google, Inc.
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Compatibility tests between Dulwich and the cgit HTTP server.
-
-warning: these tests should be fairly stable, but when writing/debugging new
- tests, deadlocks may freeze the test process such that it cannot be
- Ctrl-C'ed. On POSIX systems, you can kill the tests with Ctrl-Z, "kill %".
-"""
-
-import sys
-import threading
-from typing import Tuple
-from wsgiref import simple_server
-
-from dulwich.tests import SkipTest, skipIf
-
-from ...server import DictBackend, ReceivePackHandler, UploadPackHandler
-from ...web import (
- HTTPGitApplication,
- WSGIRequestHandlerLogger,
- WSGIServerLogger,
- make_wsgi_chain,
-)
-from .server_utils import NoSideBand64kReceivePackHandler, ServerTests
-from .utils import CompatTestCase
-
-
-@skipIf(sys.platform == "win32", "Broken on windows, with very long fail time.")
-class WebTests(ServerTests):
- """Base tests for web server tests.
-
- Contains utility and setUp/tearDown methods, but does non inherit from
- TestCase so tests are not automatically run.
- """
-
- protocol = "http"
-
- def _start_server(self, repo):
- backend = DictBackend({"/": repo})
- app = self._make_app(backend)
- dul_server = simple_server.make_server(
- "localhost",
- 0,
- app,
- server_class=WSGIServerLogger,
- handler_class=WSGIRequestHandlerLogger,
- )
- self.addCleanup(dul_server.shutdown)
- self.addCleanup(dul_server.server_close)
- threading.Thread(target=dul_server.serve_forever).start()
- self._server = dul_server
- _, port = dul_server.socket.getsockname()
- return port
-
-
-@skipIf(sys.platform == "win32", "Broken on windows, with very long fail time.")
-class SmartWebTestCase(WebTests, CompatTestCase):
- """Test cases for smart HTTP server.
-
- This server test case does not use side-band-64k in git-receive-pack.
- """
-
- min_git_version: Tuple[int, ...] = (1, 6, 6)
-
- def _handlers(self):
- return {b"git-receive-pack": NoSideBand64kReceivePackHandler}
-
- def _check_app(self, app):
- receive_pack_handler_cls = app.handlers[b"git-receive-pack"]
- caps = receive_pack_handler_cls.capabilities()
- self.assertNotIn(b"side-band-64k", caps)
-
- def _make_app(self, backend):
- app = make_wsgi_chain(backend, handlers=self._handlers())
- to_check = app
- # peel back layers until we're at the base application
- while not issubclass(to_check.__class__, HTTPGitApplication):
- to_check = to_check.app
- self._check_app(to_check)
- return app
-
-
-def patch_capabilities(handler, caps_removed):
- # Patch a handler's capabilities by specifying a list of them to be
- # removed, and return the original classmethod for restoration.
- original_capabilities = handler.capabilities
- filtered_capabilities = [
- i for i in original_capabilities() if i not in caps_removed
- ]
-
- def capabilities(cls):
- return filtered_capabilities
-
- handler.capabilities = classmethod(capabilities)
- return original_capabilities
-
-
-@skipIf(sys.platform == "win32", "Broken on windows, with very long fail time.")
-class SmartWebSideBand64kTestCase(SmartWebTestCase):
- """Test cases for smart HTTP server with side-band-64k support."""
-
- # side-band-64k in git-receive-pack was introduced in git 1.7.0.2
- min_git_version = (1, 7, 0, 2)
-
- def setUp(self):
- self.o_uph_cap = patch_capabilities(UploadPackHandler, (b"no-done",))
- self.o_rph_cap = patch_capabilities(ReceivePackHandler, (b"no-done",))
- super().setUp()
-
- def tearDown(self):
- super().tearDown()
- UploadPackHandler.capabilities = self.o_uph_cap
- ReceivePackHandler.capabilities = self.o_rph_cap
-
- def _handlers(self):
- return None # default handlers include side-band-64k
-
- def _check_app(self, app):
- receive_pack_handler_cls = app.handlers[b"git-receive-pack"]
- caps = receive_pack_handler_cls.capabilities()
- self.assertIn(b"side-band-64k", caps)
- self.assertNotIn(b"no-done", caps)
-
-
-class SmartWebSideBand64kNoDoneTestCase(SmartWebTestCase):
- """Test cases for smart HTTP server with side-band-64k and no-done
- support.
- """
-
- # no-done was introduced in git 1.7.4
- min_git_version = (1, 7, 4)
-
- def _handlers(self):
- return None # default handlers include side-band-64k
-
- def _check_app(self, app):
- receive_pack_handler_cls = app.handlers[b"git-receive-pack"]
- caps = receive_pack_handler_cls.capabilities()
- self.assertIn(b"side-band-64k", caps)
- self.assertIn(b"no-done", caps)
-
-
-@skipIf(sys.platform == "win32", "Broken on windows, with very long fail time.")
-class DumbWebTestCase(WebTests, CompatTestCase):
- """Test cases for dumb HTTP server."""
-
- def _make_app(self, backend):
- return make_wsgi_chain(backend, dumb=True)
-
- def test_push_to_dulwich(self):
- # Note: remove this if dulwich implements dumb web pushing.
- raise SkipTest("Dumb web pushing not supported.")
-
- def test_push_to_dulwich_remove_branch(self):
- # Note: remove this if dumb pushing is supported
- raise SkipTest("Dumb web pushing not supported.")
-
- def test_new_shallow_clone_from_dulwich(self):
- # Note: remove this if C git and dulwich implement dumb web shallow
- # clones.
- raise SkipTest("Dumb web shallow cloning not supported.")
-
- def test_shallow_clone_from_git_is_identical(self):
- # Note: remove this if C git and dulwich implement dumb web shallow
- # clones.
- raise SkipTest("Dumb web shallow cloning not supported.")
-
- def test_fetch_same_depth_into_shallow_clone_from_dulwich(self):
- # Note: remove this if C git and dulwich implement dumb web shallow
- # clones.
- raise SkipTest("Dumb web shallow cloning not supported.")
-
- def test_fetch_full_depth_into_shallow_clone_from_dulwich(self):
- # Note: remove this if C git and dulwich implement dumb web shallow
- # clones.
- raise SkipTest("Dumb web shallow cloning not supported.")
-
- def test_push_to_dulwich_issue_88_standard(self):
- raise SkipTest("Dumb web pushing not supported.")
blob - d7c4a7b61b3ca0b8194fd3cb64116be677a9c2aa (mode 644)
blob + /dev/null
--- dulwich/tests/compat/utils.py
+++ /dev/null
-# utils.py -- Git compatibility utilities
-# Copyright (C) 2010 Google, Inc.
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Utilities for interacting with cgit."""
-
-import errno
-import functools
-import os
-import shutil
-import socket
-import stat
-import subprocess
-import sys
-import tempfile
-import time
-from typing import Tuple
-
-from dulwich.tests import SkipTest, TestCase
-
-from ...protocol import TCP_GIT_PORT
-from ...repo import Repo
-
-_DEFAULT_GIT = "git"
-_VERSION_LEN = 4
-_REPOS_DATA_DIR = os.path.abspath(
- os.path.join(
- os.path.dirname(__file__), os.pardir, os.pardir, os.pardir, "testdata", "repos"
- )
-)
-
-
-def git_version(git_path=_DEFAULT_GIT):
- """Attempt to determine the version of git currently installed.
-
- Args:
- git_path: Path to the git executable; defaults to the version in
- the system path.
- Returns: A tuple of ints of the form (major, minor, point, sub-point), or
- None if no git installation was found.
- """
- try:
- output = run_git_or_fail(["--version"], git_path=git_path)
- except OSError:
- return None
- version_prefix = b"git version "
- if not output.startswith(version_prefix):
- return None
-
- parts = output[len(version_prefix) :].split(b".")
- nums = []
- for part in parts:
- try:
- nums.append(int(part))
- except ValueError:
- break
-
- while len(nums) < _VERSION_LEN:
- nums.append(0)
- return tuple(nums[:_VERSION_LEN])
-
-
-def require_git_version(required_version, git_path=_DEFAULT_GIT):
- """Require git version >= version, or skip the calling test.
-
- Args:
- required_version: A tuple of ints of the form (major, minor, point,
- sub-point); omitted components default to 0.
- git_path: Path to the git executable; defaults to the version in
- the system path.
-
- Raises:
- ValueError: if the required version tuple has too many parts.
- SkipTest: if no suitable git version was found at the given path.
- """
- found_version = git_version(git_path=git_path)
- if found_version is None:
- raise SkipTest(f"Test requires git >= {required_version}, but c git not found")
-
- if len(required_version) > _VERSION_LEN:
- raise ValueError(
- "Invalid version tuple %s, expected %i parts"
- % (required_version, _VERSION_LEN)
- )
-
- required_version = list(required_version)
- while len(found_version) < len(required_version):
- required_version.append(0)
- required_version = tuple(required_version)
-
- if found_version < required_version:
- required_version = ".".join(map(str, required_version))
- found_version = ".".join(map(str, found_version))
- raise SkipTest(
- f"Test requires git >= {required_version}, found {found_version}"
- )
-
-
-def run_git(
- args,
- git_path=_DEFAULT_GIT,
- input=None,
- capture_stdout=False,
- capture_stderr=False,
- **popen_kwargs,
-):
- """Run a git command.
-
- Input is piped from the input parameter and output is sent to the standard
- streams, unless capture_stdout is set.
-
- Args:
- args: A list of args to the git command.
- git_path: Path to to the git executable.
- input: Input data to be sent to stdin.
- capture_stdout: Whether to capture and return stdout.
- popen_kwargs: Additional kwargs for subprocess.Popen;
- stdin/stdout args are ignored.
- Returns: A tuple of (returncode, stdout contents, stderr contents).
- If capture_stdout is False, None will be returned as stdout contents.
- If capture_stderr is False, None will be returned as stderr contents.
-
- Raises:
- OSError: if the git executable was not found.
- """
- env = popen_kwargs.pop("env", {})
- env["LC_ALL"] = env["LANG"] = "C"
- env["PATH"] = os.getenv("PATH")
-
- args = [git_path, *args]
- popen_kwargs["stdin"] = subprocess.PIPE
- if capture_stdout:
- popen_kwargs["stdout"] = subprocess.PIPE
- else:
- popen_kwargs.pop("stdout", None)
- if capture_stderr:
- popen_kwargs["stderr"] = subprocess.PIPE
- else:
- popen_kwargs.pop("stderr", None)
- p = subprocess.Popen(args, env=env, **popen_kwargs)
- stdout, stderr = p.communicate(input=input)
- return (p.returncode, stdout, stderr)
-
-
-def run_git_or_fail(args, git_path=_DEFAULT_GIT, input=None, **popen_kwargs):
- """Run a git command, capture stdout/stderr, and fail if git fails."""
- if "stderr" not in popen_kwargs:
- popen_kwargs["stderr"] = subprocess.STDOUT
- returncode, stdout, stderr = run_git(
- args,
- git_path=git_path,
- input=input,
- capture_stdout=True,
- capture_stderr=True,
- **popen_kwargs,
- )
- if returncode != 0:
- raise AssertionError(
- "git with args %r failed with %d: stdout=%r stderr=%r"
- % (args, returncode, stdout, stderr)
- )
- return stdout
-
-
-def import_repo_to_dir(name):
- """Import a repo from a fast-export file in a temporary directory.
-
- These are used rather than binary repos for compat tests because they are
- more compact and human-editable, and we already depend on git.
-
- Args:
- name: The name of the repository export file, relative to
- dulwich/tests/data/repos.
- Returns: The path to the imported repository.
- """
- temp_dir = tempfile.mkdtemp()
- export_path = os.path.join(_REPOS_DATA_DIR, name)
- temp_repo_dir = os.path.join(temp_dir, name)
- export_file = open(export_path, "rb")
- run_git_or_fail(["init", "--quiet", "--bare", temp_repo_dir])
- run_git_or_fail(["fast-import"], input=export_file.read(), cwd=temp_repo_dir)
- export_file.close()
- return temp_repo_dir
-
-
-def check_for_daemon(limit=10, delay=0.1, timeout=0.1, port=TCP_GIT_PORT):
- """Check for a running TCP daemon.
-
- Defaults to checking 10 times with a delay of 0.1 sec between tries.
-
- Args:
- limit: Number of attempts before deciding no daemon is running.
- delay: Delay between connection attempts.
- timeout: Socket timeout for connection attempts.
- port: Port on which we expect the daemon to appear.
- Returns: A boolean, true if a daemon is running on the specified port,
- false if not.
- """
- for _ in range(limit):
- time.sleep(delay)
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- s.settimeout(delay)
- try:
- s.connect(("localhost", port))
- return True
- except socket.timeout:
- pass
- except OSError as e:
- if getattr(e, "errno", False) and e.errno != errno.ECONNREFUSED:
- raise
- elif e.args[0] != errno.ECONNREFUSED:
- raise
- finally:
- s.close()
- return False
-
-
-class CompatTestCase(TestCase):
- """Test case that requires git for compatibility checks.
-
- Subclasses can change the git version required by overriding
- min_git_version.
- """
-
- min_git_version: Tuple[int, ...] = (1, 5, 0)
-
- def setUp(self):
- super().setUp()
- require_git_version(self.min_git_version)
-
- def assertObjectStoreEqual(self, store1, store2):
- self.assertEqual(sorted(set(store1)), sorted(set(store2)))
-
- def assertReposEqual(self, repo1, repo2):
- self.assertEqual(repo1.get_refs(), repo2.get_refs())
- self.assertObjectStoreEqual(repo1.object_store, repo2.object_store)
-
- def assertReposNotEqual(self, repo1, repo2):
- refs1 = repo1.get_refs()
- objs1 = set(repo1.object_store)
- refs2 = repo2.get_refs()
- objs2 = set(repo2.object_store)
- self.assertFalse(refs1 == refs2 and objs1 == objs2)
-
- def import_repo(self, name):
- """Import a repo from a fast-export file in a temporary directory.
-
- Args:
- name: The name of the repository export file, relative to
- dulwich/tests/data/repos.
- Returns: An initialized Repo object that lives in a temporary
- directory.
- """
- path = import_repo_to_dir(name)
- repo = Repo(path)
-
- def cleanup():
- repo.close()
- rmtree_ro(os.path.dirname(path.rstrip(os.sep)))
-
- self.addCleanup(cleanup)
- return repo
-
-
-if sys.platform == "win32":
-
- def remove_ro(action, name, exc):
- os.chmod(name, stat.S_IWRITE)
- os.remove(name)
-
- rmtree_ro = functools.partial(shutil.rmtree, onerror=remove_ro)
-else:
- rmtree_ro = shutil.rmtree
blob - 0c84509df8083ee8dab1fb5584e30ad88b7d6865 (mode 644)
blob + /dev/null
--- dulwich/tests/test_archive.py
+++ /dev/null
-# test_archive.py -- tests for archive
-# Copyright (C) 2015 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for archive support."""
-
-import struct
-import tarfile
-from io import BytesIO
-from unittest import skipUnless
-
-from dulwich.tests import TestCase
-
-from ..archive import tar_stream
-from ..object_store import MemoryObjectStore
-from ..objects import Blob, Tree
-from .utils import build_commit_graph
-
-try:
- from unittest.mock import patch
-except ImportError:
- patch = None # type: ignore
-
-
-class ArchiveTests(TestCase):
- def test_empty(self):
- store = MemoryObjectStore()
- c1, c2, c3 = build_commit_graph(store, [[1], [2, 1], [3, 1, 2]])
- tree = store[c3.tree]
- stream = b"".join(tar_stream(store, tree, 10))
- out = BytesIO(stream)
- tf = tarfile.TarFile(fileobj=out)
- self.addCleanup(tf.close)
- self.assertEqual([], tf.getnames())
-
- def _get_example_tar_stream(self, *tar_stream_args, **tar_stream_kwargs):
- store = MemoryObjectStore()
- b1 = Blob.from_string(b"somedata")
- store.add_object(b1)
- t1 = Tree()
- t1.add(b"somename", 0o100644, b1.id)
- store.add_object(t1)
- stream = b"".join(tar_stream(store, t1, *tar_stream_args, **tar_stream_kwargs))
- return BytesIO(stream)
-
- def test_simple(self):
- stream = self._get_example_tar_stream(mtime=0)
- tf = tarfile.TarFile(fileobj=stream)
- self.addCleanup(tf.close)
- self.assertEqual(["somename"], tf.getnames())
-
- def test_unicode(self):
- store = MemoryObjectStore()
- b1 = Blob.from_string(b"somedata")
- store.add_object(b1)
- t1 = Tree()
- t1.add("ő".encode(), 0o100644, b1.id)
- store.add_object(t1)
- stream = b"".join(tar_stream(store, t1, mtime=0))
- tf = tarfile.TarFile(fileobj=BytesIO(stream))
- self.addCleanup(tf.close)
- self.assertEqual(["ő"], tf.getnames())
-
- def test_prefix(self):
- stream = self._get_example_tar_stream(mtime=0, prefix=b"blah")
- tf = tarfile.TarFile(fileobj=stream)
- self.addCleanup(tf.close)
- self.assertEqual(["blah/somename"], tf.getnames())
-
- def test_gzip_mtime(self):
- stream = self._get_example_tar_stream(mtime=1234, format="gz")
- expected_mtime = struct.pack("<L", 1234)
- self.assertEqual(stream.getvalue()[4:8], expected_mtime)
-
- @skipUnless(patch, "Required mock.patch")
- def test_same_file(self):
- contents = [None, None]
- for format in ["", "gz", "bz2"]:
- for i in [0, 1]:
- with patch("time.time", return_value=i):
- stream = self._get_example_tar_stream(mtime=0, format=format)
- contents[i] = stream.getvalue()
- self.assertEqual(
- contents[0],
- contents[1],
- "Different file contents for format %r" % format,
- )
blob - 7041ce00d3f1f8e41196e101e30b961b098ed8bc (mode 644)
blob + /dev/null
--- dulwich/tests/test_blackbox.py
+++ /dev/null
-# test_blackbox.py -- blackbox tests
-# Copyright (C) 2010 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Blackbox tests for Dulwich commands."""
-
-import shutil
-import tempfile
-
-from dulwich.tests import BlackboxTestCase
-
-from ..repo import Repo
-
-
-class GitReceivePackTests(BlackboxTestCase):
- """Blackbox tests for dul-receive-pack."""
-
- def setUp(self):
- super().setUp()
- self.path = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.path)
- self.repo = Repo.init(self.path)
-
- def test_basic(self):
- process = self.run_command("dul-receive-pack", [self.path])
- (stdout, stderr) = process.communicate(b"0000")
- self.assertEqual(b"0000", stdout[-4:])
- self.assertEqual(0, process.returncode)
-
- def test_missing_arg(self):
- process = self.run_command("dul-receive-pack", [])
- (stdout, stderr) = process.communicate()
- self.assertEqual(
- [b"usage: dul-receive-pack <git-dir>"], stderr.splitlines()[-1:]
- )
- self.assertEqual(b"", stdout)
- self.assertEqual(1, process.returncode)
-
-
-class GitUploadPackTests(BlackboxTestCase):
- """Blackbox tests for dul-upload-pack."""
-
- def setUp(self):
- super().setUp()
- self.path = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.path)
- self.repo = Repo.init(self.path)
-
- def test_missing_arg(self):
- process = self.run_command("dul-upload-pack", [])
- (stdout, stderr) = process.communicate()
- self.assertEqual(
- [b"usage: dul-upload-pack <git-dir>"], stderr.splitlines()[-1:]
- )
- self.assertEqual(b"", stdout)
- self.assertEqual(1, process.returncode)
blob - 1f0cba2c810290317f29451eb7e9c9f0130ff1b6 (mode 644)
blob + /dev/null
--- dulwich/tests/test_bundle.py
+++ /dev/null
-# test_bundle.py -- tests for bundle
-# Copyright (C) 2020 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for bundle support."""
-
-import os
-import tempfile
-from io import BytesIO
-
-from dulwich.tests import TestCase
-
-from ..bundle import Bundle, read_bundle, write_bundle
-from ..pack import PackData, write_pack_objects
-
-
-class BundleTests(TestCase):
- def test_roundtrip_bundle(self):
- origbundle = Bundle()
- origbundle.version = 3
- origbundle.capabilities = {"foo": None}
- origbundle.references = {b"refs/heads/master": b"ab" * 20}
- origbundle.prerequisites = [(b"cc" * 20, "comment")]
- b = BytesIO()
- write_pack_objects(b.write, [])
- b.seek(0)
- origbundle.pack_data = PackData.from_file(b)
- with tempfile.TemporaryDirectory() as td:
- with open(os.path.join(td, "foo"), "wb") as f:
- write_bundle(f, origbundle)
-
- with open(os.path.join(td, "foo"), "rb") as f:
- newbundle = read_bundle(f)
-
- self.assertEqual(origbundle, newbundle)
blob - 72eebc3e38ee8e04b0244d8a6a960d1a463ca061 (mode 644)
blob + /dev/null
--- dulwich/tests/test_client.py
+++ /dev/null
-# test_client.py -- Tests for the git protocol, client side
-# Copyright (C) 2009 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-import base64
-import os
-import shutil
-import sys
-import tempfile
-import warnings
-from io import BytesIO
-from typing import Dict
-from unittest.mock import patch
-from urllib.parse import quote as urlquote
-from urllib.parse import urlparse
-
-import dulwich
-from dulwich import client
-from dulwich.tests import TestCase, skipIf
-
-from ..client import (
- FetchPackResult,
- GitProtocolError,
- HangupException,
- HttpGitClient,
- InvalidWants,
- LocalGitClient,
- PLinkSSHVendor,
- ReportStatusParser,
- SendPackError,
- SSHGitClient,
- StrangeHostname,
- SubprocessSSHVendor,
- TCPGitClient,
- TraditionalGitClient,
- _remote_error_from_stderr,
- check_wants,
- default_urllib3_manager,
- get_credentials_from_store,
- get_transport_and_path,
- get_transport_and_path_from_url,
- parse_rsync_url,
-)
-from ..config import ConfigDict
-from ..objects import Commit, Tree
-from ..pack import pack_objects_to_data, write_pack_data, write_pack_objects
-from ..protocol import TCP_GIT_PORT, Protocol
-from ..repo import MemoryRepo, Repo
-from .utils import open_repo, setup_warning_catcher, tear_down_repo
-
-
-class DummyClient(TraditionalGitClient):
- def __init__(self, can_read, read, write) -> None:
- self.can_read = can_read
- self.read = read
- self.write = write
- TraditionalGitClient.__init__(self)
-
- def _connect(self, service, path):
- return Protocol(self.read, self.write), self.can_read, None
-
-
-class DummyPopen:
- def __init__(self, *args, **kwards) -> None:
- self.stdin = BytesIO(b"stdin")
- self.stdout = BytesIO(b"stdout")
- self.stderr = BytesIO(b"stderr")
- self.returncode = 0
- self.args = args
- self.kwargs = kwards
-
- def communicate(self, *args, **kwards):
- return ("Running", "")
-
- def wait(self, *args, **kwards):
- return False
-
-
-# TODO(durin42): add unit-level tests of GitClient
-class GitClientTests(TestCase):
- def setUp(self):
- super().setUp()
- self.rout = BytesIO()
- self.rin = BytesIO()
- self.client = DummyClient(lambda x: True, self.rin.read, self.rout.write)
-
- def test_caps(self):
- agent_cap = ("agent=dulwich/%d.%d.%d" % dulwich.__version__).encode("ascii")
- self.assertEqual(
- {
- b"multi_ack",
- b"side-band-64k",
- b"ofs-delta",
- b"thin-pack",
- b"multi_ack_detailed",
- b"shallow",
- agent_cap,
- },
- set(self.client._fetch_capabilities),
- )
- self.assertEqual(
- {
- b"delete-refs",
- b"ofs-delta",
- b"report-status",
- b"side-band-64k",
- agent_cap,
- },
- set(self.client._send_capabilities),
- )
-
- def test_archive_ack(self):
- self.rin.write(b"0009NACK\n" b"0000")
- self.rin.seek(0)
- self.client.archive(b"bla", b"HEAD", None, None)
- self.assertEqual(self.rout.getvalue(), b"0011argument HEAD0000")
-
- def test_fetch_empty(self):
- self.rin.write(b"0000")
- self.rin.seek(0)
-
- def check_heads(heads, **kwargs):
- self.assertEqual(heads, {})
- return []
-
- ret = self.client.fetch_pack(b"/", check_heads, None, None)
- self.assertEqual({}, ret.refs)
- self.assertEqual({}, ret.symrefs)
-
- def test_fetch_pack_ignores_magic_ref(self):
- self.rin.write(
- b"00000000000000000000000000000000000000000000 capabilities^{}"
- b"\x00 multi_ack "
- b"thin-pack side-band side-band-64k ofs-delta shallow no-progress "
- b"include-tag\n"
- b"0000"
- )
- self.rin.seek(0)
-
- def check_heads(heads, **kwargs):
- self.assertEqual({}, heads)
- return []
-
- ret = self.client.fetch_pack(b"bla", check_heads, None, None, None)
- self.assertEqual({}, ret.refs)
- self.assertEqual({}, ret.symrefs)
- self.assertEqual(self.rout.getvalue(), b"0000")
-
- def test_fetch_pack_none(self):
- self.rin.write(
- b"008855dcc6bf963f922e1ed5c4bbaaefcfacef57b1d7 HEAD\x00multi_ack "
- b"thin-pack side-band side-band-64k ofs-delta shallow no-progress "
- b"include-tag\n"
- b"0000"
- )
- self.rin.seek(0)
- ret = self.client.fetch_pack(
- b"bla", lambda heads, **kwargs: [], None, None, None
- )
- self.assertEqual(
- {b"HEAD": b"55dcc6bf963f922e1ed5c4bbaaefcfacef57b1d7"}, ret.refs
- )
- self.assertEqual({}, ret.symrefs)
- self.assertEqual(self.rout.getvalue(), b"0000")
-
- def test_send_pack_no_sideband64k_with_update_ref_error(self) -> None:
- # No side-bank-64k reported by server shouldn't try to parse
- # side band data
- pkts = [
- b"55dcc6bf963f922e1ed5c4bbaaefcfacef57b1d7 capabilities^{}"
- b"\x00 report-status delete-refs ofs-delta\n",
- b"",
- b"unpack ok",
- b"ng refs/foo/bar pre-receive hook declined",
- b"",
- ]
- for pkt in pkts:
- if pkt == b"":
- self.rin.write(b"0000")
- else:
- self.rin.write(("%04x" % (len(pkt) + 4)).encode("ascii") + pkt)
- self.rin.seek(0)
-
- tree = Tree()
- commit = Commit()
- commit.tree = tree
- commit.parents = []
- commit.author = commit.committer = b"test user"
- commit.commit_time = commit.author_time = 1174773719
- commit.commit_timezone = commit.author_timezone = 0
- commit.encoding = b"UTF-8"
- commit.message = b"test message"
-
- def update_refs(refs):
- return {
- b"refs/foo/bar": commit.id,
- }
-
- def generate_pack_data(have, want, ofs_delta=False, progress=None):
- return pack_objects_to_data(
- [
- (commit, None),
- (tree, b""),
- ]
- )
-
- result = self.client.send_pack("blah", update_refs, generate_pack_data)
- self.assertEqual(
- {b"refs/foo/bar": "pre-receive hook declined"}, result.ref_status
- )
- self.assertEqual({b"refs/foo/bar": commit.id}, result.refs)
-
- def test_send_pack_none(self):
- # Set ref to current value
- self.rin.write(
- b"0078310ca9477129b8586fa2afc779c1f57cf64bba6c "
- b"refs/heads/master\x00 report-status delete-refs "
- b"side-band-64k quiet ofs-delta\n"
- b"0000"
- )
- self.rin.seek(0)
-
- def update_refs(refs):
- return {b"refs/heads/master": b"310ca9477129b8586fa2afc779c1f57cf64bba6c"}
-
- def generate_pack_data(have, want, ofs_delta=False, progress=None):
- return 0, []
-
- self.client.send_pack(b"/", update_refs, generate_pack_data)
- self.assertEqual(self.rout.getvalue(), b"0000")
-
- def test_send_pack_keep_and_delete(self):
- self.rin.write(
- b"0063310ca9477129b8586fa2afc779c1f57cf64bba6c "
- b"refs/heads/master\x00report-status delete-refs ofs-delta\n"
- b"003f310ca9477129b8586fa2afc779c1f57cf64bba6c refs/heads/keepme\n"
- b"0000000eunpack ok\n"
- b"0019ok refs/heads/master\n"
- b"0000"
- )
- self.rin.seek(0)
-
- def update_refs(refs):
- return {b"refs/heads/master": b"0" * 40}
-
- def generate_pack_data(have, want, ofs_delta=False, progress=None):
- return 0, []
-
- self.client.send_pack(b"/", update_refs, generate_pack_data)
- self.assertEqual(
- self.rout.getvalue(),
- b"008b310ca9477129b8586fa2afc779c1f57cf64bba6c "
- b"0000000000000000000000000000000000000000 "
- b"refs/heads/master\x00delete-refs ofs-delta report-status0000",
- )
-
- def test_send_pack_delete_only(self):
- self.rin.write(
- b"0063310ca9477129b8586fa2afc779c1f57cf64bba6c "
- b"refs/heads/master\x00report-status delete-refs ofs-delta\n"
- b"0000000eunpack ok\n"
- b"0019ok refs/heads/master\n"
- b"0000"
- )
- self.rin.seek(0)
-
- def update_refs(refs):
- return {b"refs/heads/master": b"0" * 40}
-
- def generate_pack_data(have, want, ofs_delta=False, progress=None):
- return 0, []
-
- self.client.send_pack(b"/", update_refs, generate_pack_data)
- self.assertEqual(
- self.rout.getvalue(),
- b"008b310ca9477129b8586fa2afc779c1f57cf64bba6c "
- b"0000000000000000000000000000000000000000 "
- b"refs/heads/master\x00delete-refs ofs-delta report-status0000",
- )
-
- def test_send_pack_new_ref_only(self):
- self.rin.write(
- b"0063310ca9477129b8586fa2afc779c1f57cf64bba6c "
- b"refs/heads/master\x00report-status delete-refs ofs-delta\n"
- b"0000000eunpack ok\n"
- b"0019ok refs/heads/blah12\n"
- b"0000"
- )
- self.rin.seek(0)
-
- def update_refs(refs):
- return {
- b"refs/heads/blah12": b"310ca9477129b8586fa2afc779c1f57cf64bba6c",
- b"refs/heads/master": b"310ca9477129b8586fa2afc779c1f57cf64bba6c",
- }
-
- def generate_pack_data(have, want, ofs_delta=False, progress=None):
- return 0, []
-
- f = BytesIO()
- write_pack_objects(f.write, [])
- self.client.send_pack("/", update_refs, generate_pack_data)
- self.assertEqual(
- self.rout.getvalue(),
- b"008b0000000000000000000000000000000000000000 "
- b"310ca9477129b8586fa2afc779c1f57cf64bba6c "
- b"refs/heads/blah12\x00delete-refs ofs-delta report-status0000"
- + f.getvalue(),
- )
-
- def test_send_pack_new_ref(self):
- self.rin.write(
- b"0064310ca9477129b8586fa2afc779c1f57cf64bba6c "
- b"refs/heads/master\x00 report-status delete-refs ofs-delta\n"
- b"0000000eunpack ok\n"
- b"0019ok refs/heads/blah12\n"
- b"0000"
- )
- self.rin.seek(0)
-
- tree = Tree()
- commit = Commit()
- commit.tree = tree
- commit.parents = []
- commit.author = commit.committer = b"test user"
- commit.commit_time = commit.author_time = 1174773719
- commit.commit_timezone = commit.author_timezone = 0
- commit.encoding = b"UTF-8"
- commit.message = b"test message"
-
- def update_refs(refs):
- return {
- b"refs/heads/blah12": commit.id,
- b"refs/heads/master": b"310ca9477129b8586fa2afc779c1f57cf64bba6c",
- }
-
- def generate_pack_data(have, want, ofs_delta=False, progress=None):
- return pack_objects_to_data(
- [
- (commit, None),
- (tree, b""),
- ]
- )
-
- f = BytesIO()
- count, records = generate_pack_data(None, None)
- write_pack_data(f.write, records, num_records=count)
- self.client.send_pack(b"/", update_refs, generate_pack_data)
- self.assertEqual(
- self.rout.getvalue(),
- b"008b0000000000000000000000000000000000000000 "
- + commit.id
- + b" refs/heads/blah12\x00delete-refs ofs-delta report-status0000"
- + f.getvalue(),
- )
-
- def test_send_pack_no_deleteref_delete_only(self):
- pkts = [
- b"310ca9477129b8586fa2afc779c1f57cf64bba6c refs/heads/master"
- b"\x00 report-status ofs-delta\n",
- b"",
- b"",
- ]
- for pkt in pkts:
- if pkt == b"":
- self.rin.write(b"0000")
- else:
- self.rin.write(("%04x" % (len(pkt) + 4)).encode("ascii") + pkt)
- self.rin.seek(0)
-
- def update_refs(refs):
- return {b"refs/heads/master": b"0" * 40}
-
- def generate_pack_data(have, want, ofs_delta=False, progress=None):
- return 0, []
-
- result = self.client.send_pack(b"/", update_refs, generate_pack_data)
- self.assertEqual(
- result.ref_status,
- {b"refs/heads/master": "remote does not support deleting refs"},
- )
- self.assertEqual(
- result.refs,
- {b"refs/heads/master": b"310ca9477129b8586fa2afc779c1f57cf64bba6c"},
- )
- self.assertEqual(self.rout.getvalue(), b"0000")
-
-
-class TestGetTransportAndPath(TestCase):
- def test_tcp(self):
- c, path = get_transport_and_path("git://foo.com/bar/baz")
- self.assertIsInstance(c, TCPGitClient)
- self.assertEqual("foo.com", c._host)
- self.assertEqual(TCP_GIT_PORT, c._port)
- self.assertEqual("/bar/baz", path)
-
- def test_tcp_port(self):
- c, path = get_transport_and_path("git://foo.com:1234/bar/baz")
- self.assertIsInstance(c, TCPGitClient)
- self.assertEqual("foo.com", c._host)
- self.assertEqual(1234, c._port)
- self.assertEqual("/bar/baz", path)
-
- def test_git_ssh_explicit(self):
- c, path = get_transport_and_path("git+ssh://foo.com/bar/baz")
- self.assertIsInstance(c, SSHGitClient)
- self.assertEqual("foo.com", c.host)
- self.assertEqual(None, c.port)
- self.assertEqual(None, c.username)
- self.assertEqual("/bar/baz", path)
-
- def test_ssh_explicit(self):
- c, path = get_transport_and_path("ssh://foo.com/bar/baz")
- self.assertIsInstance(c, SSHGitClient)
- self.assertEqual("foo.com", c.host)
- self.assertEqual(None, c.port)
- self.assertEqual(None, c.username)
- self.assertEqual("/bar/baz", path)
-
- def test_ssh_port_explicit(self):
- c, path = get_transport_and_path("git+ssh://foo.com:1234/bar/baz")
- self.assertIsInstance(c, SSHGitClient)
- self.assertEqual("foo.com", c.host)
- self.assertEqual(1234, c.port)
- self.assertEqual("/bar/baz", path)
-
- def test_username_and_port_explicit_unknown_scheme(self):
- c, path = get_transport_and_path("unknown://git@server:7999/dply/stuff.git")
- self.assertIsInstance(c, SSHGitClient)
- self.assertEqual("unknown", c.host)
- self.assertEqual("//git@server:7999/dply/stuff.git", path)
-
- def test_username_and_port_explicit(self):
- c, path = get_transport_and_path("ssh://git@server:7999/dply/stuff.git")
- self.assertIsInstance(c, SSHGitClient)
- self.assertEqual("git", c.username)
- self.assertEqual("server", c.host)
- self.assertEqual(7999, c.port)
- self.assertEqual("/dply/stuff.git", path)
-
- def test_ssh_abspath_doubleslash(self):
- c, path = get_transport_and_path("git+ssh://foo.com//bar/baz")
- self.assertIsInstance(c, SSHGitClient)
- self.assertEqual("foo.com", c.host)
- self.assertEqual(None, c.port)
- self.assertEqual(None, c.username)
- self.assertEqual("//bar/baz", path)
-
- def test_ssh_port(self):
- c, path = get_transport_and_path("git+ssh://foo.com:1234/bar/baz")
- self.assertIsInstance(c, SSHGitClient)
- self.assertEqual("foo.com", c.host)
- self.assertEqual(1234, c.port)
- self.assertEqual("/bar/baz", path)
-
- def test_ssh_implicit(self):
- c, path = get_transport_and_path("foo:/bar/baz")
- self.assertIsInstance(c, SSHGitClient)
- self.assertEqual("foo", c.host)
- self.assertEqual(None, c.port)
- self.assertEqual(None, c.username)
- self.assertEqual("/bar/baz", path)
-
- def test_ssh_host(self):
- c, path = get_transport_and_path("foo.com:/bar/baz")
- self.assertIsInstance(c, SSHGitClient)
- self.assertEqual("foo.com", c.host)
- self.assertEqual(None, c.port)
- self.assertEqual(None, c.username)
- self.assertEqual("/bar/baz", path)
-
- def test_ssh_user_host(self):
- c, path = get_transport_and_path("user@foo.com:/bar/baz")
- self.assertIsInstance(c, SSHGitClient)
- self.assertEqual("foo.com", c.host)
- self.assertEqual(None, c.port)
- self.assertEqual("user", c.username)
- self.assertEqual("/bar/baz", path)
-
- def test_ssh_relpath(self):
- c, path = get_transport_and_path("foo:bar/baz")
- self.assertIsInstance(c, SSHGitClient)
- self.assertEqual("foo", c.host)
- self.assertEqual(None, c.port)
- self.assertEqual(None, c.username)
- self.assertEqual("bar/baz", path)
-
- def test_ssh_host_relpath(self):
- c, path = get_transport_and_path("foo.com:bar/baz")
- self.assertIsInstance(c, SSHGitClient)
- self.assertEqual("foo.com", c.host)
- self.assertEqual(None, c.port)
- self.assertEqual(None, c.username)
- self.assertEqual("bar/baz", path)
-
- def test_ssh_user_host_relpath(self):
- c, path = get_transport_and_path("user@foo.com:bar/baz")
- self.assertIsInstance(c, SSHGitClient)
- self.assertEqual("foo.com", c.host)
- self.assertEqual(None, c.port)
- self.assertEqual("user", c.username)
- self.assertEqual("bar/baz", path)
-
- def test_local(self):
- c, path = get_transport_and_path("foo.bar/baz")
- self.assertIsInstance(c, LocalGitClient)
- self.assertEqual("foo.bar/baz", path)
-
- @skipIf(sys.platform != "win32", "Behaviour only happens on windows.")
- def test_local_abs_windows_path(self):
- c, path = get_transport_and_path("C:\\foo.bar\\baz")
- self.assertIsInstance(c, LocalGitClient)
- self.assertEqual("C:\\foo.bar\\baz", path)
-
- def test_error(self):
- # Need to use a known urlparse.uses_netloc URL scheme to get the
- # expected parsing of the URL on Python versions less than 2.6.5
- c, path = get_transport_and_path("prospero://bar/baz")
- self.assertIsInstance(c, SSHGitClient)
-
- def test_http(self):
- url = "https://github.com/jelmer/dulwich"
- c, path = get_transport_and_path(url)
- self.assertIsInstance(c, HttpGitClient)
- self.assertEqual("/jelmer/dulwich", path)
-
- def test_http_auth(self):
- url = "https://user:passwd@github.com/jelmer/dulwich"
-
- c, path = get_transport_and_path(url)
-
- self.assertIsInstance(c, HttpGitClient)
- self.assertEqual("/jelmer/dulwich", path)
- self.assertEqual("user", c._username)
- self.assertEqual("passwd", c._password)
-
- def test_http_auth_with_username(self):
- url = "https://github.com/jelmer/dulwich"
-
- c, path = get_transport_and_path(url, username="user2", password="blah")
-
- self.assertIsInstance(c, HttpGitClient)
- self.assertEqual("/jelmer/dulwich", path)
- self.assertEqual("user2", c._username)
- self.assertEqual("blah", c._password)
-
- def test_http_auth_with_username_and_in_url(self):
- url = "https://user:passwd@github.com/jelmer/dulwich"
-
- c, path = get_transport_and_path(url, username="user2", password="blah")
-
- self.assertIsInstance(c, HttpGitClient)
- self.assertEqual("/jelmer/dulwich", path)
- self.assertEqual("user", c._username)
- self.assertEqual("passwd", c._password)
-
- def test_http_no_auth(self):
- url = "https://github.com/jelmer/dulwich"
-
- c, path = get_transport_and_path(url)
-
- self.assertIsInstance(c, HttpGitClient)
- self.assertEqual("/jelmer/dulwich", path)
- self.assertIs(None, c._username)
- self.assertIs(None, c._password)
-
-
-class TestGetTransportAndPathFromUrl(TestCase):
- def test_tcp(self):
- c, path = get_transport_and_path_from_url("git://foo.com/bar/baz")
- self.assertIsInstance(c, TCPGitClient)
- self.assertEqual("foo.com", c._host)
- self.assertEqual(TCP_GIT_PORT, c._port)
- self.assertEqual("/bar/baz", path)
-
- def test_tcp_port(self):
- c, path = get_transport_and_path_from_url("git://foo.com:1234/bar/baz")
- self.assertIsInstance(c, TCPGitClient)
- self.assertEqual("foo.com", c._host)
- self.assertEqual(1234, c._port)
- self.assertEqual("/bar/baz", path)
-
- def test_ssh_explicit(self):
- c, path = get_transport_and_path_from_url("git+ssh://foo.com/bar/baz")
- self.assertIsInstance(c, SSHGitClient)
- self.assertEqual("foo.com", c.host)
- self.assertEqual(None, c.port)
- self.assertEqual(None, c.username)
- self.assertEqual("/bar/baz", path)
-
- def test_ssh_port_explicit(self):
- c, path = get_transport_and_path_from_url("git+ssh://foo.com:1234/bar/baz")
- self.assertIsInstance(c, SSHGitClient)
- self.assertEqual("foo.com", c.host)
- self.assertEqual(1234, c.port)
- self.assertEqual("/bar/baz", path)
-
- def test_ssh_homepath(self):
- c, path = get_transport_and_path_from_url("git+ssh://foo.com/~/bar/baz")
- self.assertIsInstance(c, SSHGitClient)
- self.assertEqual("foo.com", c.host)
- self.assertEqual(None, c.port)
- self.assertEqual(None, c.username)
- self.assertEqual("/~/bar/baz", path)
-
- def test_ssh_port_homepath(self):
- c, path = get_transport_and_path_from_url("git+ssh://foo.com:1234/~/bar/baz")
- self.assertIsInstance(c, SSHGitClient)
- self.assertEqual("foo.com", c.host)
- self.assertEqual(1234, c.port)
- self.assertEqual("/~/bar/baz", path)
-
- def test_ssh_host_relpath(self):
- self.assertRaises(
- ValueError, get_transport_and_path_from_url, "foo.com:bar/baz"
- )
-
- def test_ssh_user_host_relpath(self):
- self.assertRaises(
- ValueError, get_transport_and_path_from_url, "user@foo.com:bar/baz"
- )
-
- def test_local_path(self):
- self.assertRaises(ValueError, get_transport_and_path_from_url, "foo.bar/baz")
-
- def test_error(self):
- # Need to use a known urlparse.uses_netloc URL scheme to get the
- # expected parsing of the URL on Python versions less than 2.6.5
- self.assertRaises(
- ValueError, get_transport_and_path_from_url, "prospero://bar/baz"
- )
-
- def test_http(self):
- url = "https://github.com/jelmer/dulwich"
- c, path = get_transport_and_path_from_url(url)
- self.assertIsInstance(c, HttpGitClient)
- self.assertEqual("https://github.com", c.get_url(b"/"))
- self.assertEqual("/jelmer/dulwich", path)
-
- def test_http_port(self):
- url = "https://github.com:9090/jelmer/dulwich"
- c, path = get_transport_and_path_from_url(url)
- self.assertEqual("https://github.com:9090", c.get_url(b"/"))
- self.assertIsInstance(c, HttpGitClient)
- self.assertEqual("/jelmer/dulwich", path)
-
- @patch("os.name", "posix")
- @patch("sys.platform", "linux")
- def test_file(self):
- c, path = get_transport_and_path_from_url("file:///home/jelmer/foo")
- self.assertIsInstance(c, LocalGitClient)
- self.assertEqual("/home/jelmer/foo", path)
-
- @patch("os.name", "nt")
- @patch("sys.platform", "win32")
- def test_file_win(self):
- # `_win32_url_to_path` uses urllib.request.url2pathname, which is set to
- # `ntutl2path.url2pathname` when `os.name==nt`
- from nturl2path import url2pathname
-
- with patch("dulwich.client.url2pathname", url2pathname):
- expected = "C:\\foo.bar\\baz"
- for file_url in [
- "file:C:/foo.bar/baz",
- "file:/C:/foo.bar/baz",
- "file://C:/foo.bar/baz",
- "file://C://foo.bar//baz",
- "file:///C:/foo.bar/baz",
- ]:
- c, path = get_transport_and_path(file_url)
- self.assertIsInstance(c, LocalGitClient)
- self.assertEqual(path, expected)
-
- for remote_url in [
- "file://host.example.com/C:/foo.bar/baz"
- "file://host.example.com/C:/foo.bar/baz"
- "file:////host.example/foo.bar/baz",
- ]:
- with self.assertRaises(NotImplementedError):
- c, path = get_transport_and_path(remote_url)
-
-
-class TestSSHVendor:
- def __init__(self) -> None:
- self.host = None
- self.command = ""
- self.username = None
- self.port = None
- self.password = None
- self.key_filename = None
-
- def run_command(
- self,
- host,
- command,
- username=None,
- port=None,
- password=None,
- key_filename=None,
- ssh_command=None,
- ):
- self.host = host
- self.command = command
- self.username = username
- self.port = port
- self.password = password
- self.key_filename = key_filename
- self.ssh_command = ssh_command
-
- class Subprocess:
- pass
-
- Subprocess.read = lambda: None
- Subprocess.write = lambda: None
- Subprocess.close = lambda: None
- Subprocess.can_read = lambda: None
- return Subprocess()
-
-
-class SSHGitClientTests(TestCase):
- def setUp(self):
- super().setUp()
-
- self.server = TestSSHVendor()
- self.real_vendor = client.get_ssh_vendor
- client.get_ssh_vendor = lambda: self.server
-
- self.client = SSHGitClient("git.samba.org")
-
- def tearDown(self):
- super().tearDown()
- client.get_ssh_vendor = self.real_vendor
-
- def test_get_url(self):
- path = "/tmp/repo.git"
- c = SSHGitClient("git.samba.org")
-
- url = c.get_url(path)
- self.assertEqual("ssh://git.samba.org/tmp/repo.git", url)
-
- def test_get_url_with_username_and_port(self):
- path = "/tmp/repo.git"
- c = SSHGitClient("git.samba.org", port=2222, username="user")
-
- url = c.get_url(path)
- self.assertEqual("ssh://user@git.samba.org:2222/tmp/repo.git", url)
-
- def test_default_command(self):
- self.assertEqual(b"git-upload-pack", self.client._get_cmd_path(b"upload-pack"))
-
- def test_alternative_command_path(self):
- self.client.alternative_paths[b"upload-pack"] = b"/usr/lib/git/git-upload-pack"
- self.assertEqual(
- b"/usr/lib/git/git-upload-pack",
- self.client._get_cmd_path(b"upload-pack"),
- )
-
- def test_alternative_command_path_spaces(self):
- self.client.alternative_paths[b"upload-pack"] = (
- b"/usr/lib/git/git-upload-pack -ibla"
- )
- self.assertEqual(
- b"/usr/lib/git/git-upload-pack -ibla",
- self.client._get_cmd_path(b"upload-pack"),
- )
-
- def test_connect(self):
- server = self.server
- client = self.client
-
- client.username = b"username"
- client.port = 1337
-
- client._connect(b"command", b"/path/to/repo")
- self.assertEqual(b"username", server.username)
- self.assertEqual(1337, server.port)
- self.assertEqual("git-command '/path/to/repo'", server.command)
-
- client._connect(b"relative-command", b"/~/path/to/repo")
- self.assertEqual("git-relative-command '~/path/to/repo'", server.command)
-
- def test_ssh_command_precedence(self):
- self.overrideEnv("GIT_SSH", "/path/to/ssh")
- test_client = SSHGitClient("git.samba.org")
- self.assertEqual(test_client.ssh_command, "/path/to/ssh")
-
- self.overrideEnv("GIT_SSH_COMMAND", "/path/to/ssh -o Option=Value")
- test_client = SSHGitClient("git.samba.org")
- self.assertEqual(test_client.ssh_command, "/path/to/ssh -o Option=Value")
-
- test_client = SSHGitClient("git.samba.org", ssh_command="ssh -o Option1=Value1")
- self.assertEqual(test_client.ssh_command, "ssh -o Option1=Value1")
-
-
-class ReportStatusParserTests(TestCase):
- def test_invalid_pack(self):
- parser = ReportStatusParser()
- parser.handle_packet(b"unpack error - foo bar")
- parser.handle_packet(b"ok refs/foo/bar")
- parser.handle_packet(None)
- self.assertRaises(SendPackError, list, parser.check())
-
- def test_update_refs_error(self):
- parser = ReportStatusParser()
- parser.handle_packet(b"unpack ok")
- parser.handle_packet(b"ng refs/foo/bar need to pull")
- parser.handle_packet(None)
- self.assertEqual([(b"refs/foo/bar", "need to pull")], list(parser.check()))
-
- def test_ok(self):
- parser = ReportStatusParser()
- parser.handle_packet(b"unpack ok")
- parser.handle_packet(b"ok refs/foo/bar")
- parser.handle_packet(None)
- self.assertEqual([(b"refs/foo/bar", None)], list(parser.check()))
-
-
-class LocalGitClientTests(TestCase):
- def test_get_url(self):
- path = "/tmp/repo.git"
- c = LocalGitClient()
-
- url = c.get_url(path)
- self.assertEqual("file:///tmp/repo.git", url)
-
- def test_fetch_into_empty(self):
- c = LocalGitClient()
- target = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, target)
- t = Repo.init_bare(target)
- self.addCleanup(t.close)
- s = open_repo("a.git")
- self.addCleanup(tear_down_repo, s)
- self.assertEqual(s.get_refs(), c.fetch(s.path, t).refs)
-
- def test_clone(self):
- c = LocalGitClient()
- s = open_repo("a.git")
- self.addCleanup(tear_down_repo, s)
- target = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, target)
- result_repo = c.clone(s.path, target, mkdir=False)
- self.addCleanup(result_repo.close)
- expected = dict(s.get_refs())
- expected[b"refs/remotes/origin/HEAD"] = expected[b"HEAD"]
- expected[b"refs/remotes/origin/master"] = expected[b"refs/heads/master"]
- self.assertEqual(expected, result_repo.get_refs())
-
- def test_fetch_empty(self):
- c = LocalGitClient()
- s = open_repo("a.git")
- self.addCleanup(tear_down_repo, s)
- out = BytesIO()
- walker = {}
- ret = c.fetch_pack(
- s.path, lambda heads, **kwargs: [], graph_walker=walker, pack_data=out.write
- )
- self.assertEqual(
- {
- b"HEAD": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
- b"refs/heads/master": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
- b"refs/tags/mytag": b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a",
- b"refs/tags/mytag-packed": b"b0931cadc54336e78a1d980420e3268903b57a50",
- },
- ret.refs,
- )
- self.assertEqual({b"HEAD": b"refs/heads/master"}, ret.symrefs)
- self.assertEqual(
- b"PACK\x00\x00\x00\x02\x00\x00\x00\x00\x02\x9d\x08"
- b"\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e",
- out.getvalue(),
- )
-
- def test_fetch_pack_none(self):
- c = LocalGitClient()
- s = open_repo("a.git")
- self.addCleanup(tear_down_repo, s)
- out = BytesIO()
- walker = MemoryRepo().get_graph_walker()
- ret = c.fetch_pack(
- s.path,
- lambda heads, **kwargs: [b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"],
- graph_walker=walker,
- pack_data=out.write,
- )
- self.assertEqual({b"HEAD": b"refs/heads/master"}, ret.symrefs)
- self.assertEqual(
- {
- b"HEAD": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
- b"refs/heads/master": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
- b"refs/tags/mytag": b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a",
- b"refs/tags/mytag-packed": b"b0931cadc54336e78a1d980420e3268903b57a50",
- },
- ret.refs,
- )
- # Hardcoding is not ideal, but we'll fix that some other day..
- self.assertTrue(
- out.getvalue().startswith(b"PACK\x00\x00\x00\x02\x00\x00\x00\x07")
- )
-
- def test_send_pack_without_changes(self):
- local = open_repo("a.git")
- self.addCleanup(tear_down_repo, local)
-
- target = open_repo("a.git")
- self.addCleanup(tear_down_repo, target)
-
- self.send_and_verify(b"master", local, target)
-
- def test_send_pack_with_changes(self):
- local = open_repo("a.git")
- self.addCleanup(tear_down_repo, local)
-
- target_path = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, target_path)
- with Repo.init_bare(target_path) as target:
- self.send_and_verify(b"master", local, target)
-
- def test_get_refs(self):
- local = open_repo("refs.git")
- self.addCleanup(tear_down_repo, local)
-
- client = LocalGitClient()
- refs = client.get_refs(local.path)
- self.assertDictEqual(local.refs.as_dict(), refs)
-
- def send_and_verify(self, branch, local, target):
- """Send branch from local to remote repository and verify it worked."""
- client = LocalGitClient()
- ref_name = b"refs/heads/" + branch
- result = client.send_pack(
- target.path,
- lambda _: {ref_name: local.refs[ref_name]},
- local.generate_pack_data,
- )
-
- self.assertEqual(local.refs[ref_name], result.refs[ref_name])
- self.assertIs(None, result.agent)
- self.assertEqual({}, result.ref_status)
-
- obj_local = local.get_object(result.refs[ref_name])
- obj_target = target.get_object(result.refs[ref_name])
- self.assertEqual(obj_local, obj_target)
-
-
-class HttpGitClientTests(TestCase):
- def test_get_url(self):
- base_url = "https://github.com/jelmer/dulwich"
- path = "/jelmer/dulwich"
- c = HttpGitClient(base_url)
-
- url = c.get_url(path)
- self.assertEqual("https://github.com/jelmer/dulwich", url)
-
- def test_get_url_bytes_path(self):
- base_url = "https://github.com/jelmer/dulwich"
- path_bytes = b"/jelmer/dulwich"
- c = HttpGitClient(base_url)
-
- url = c.get_url(path_bytes)
- self.assertEqual("https://github.com/jelmer/dulwich", url)
-
- def test_get_url_with_username_and_passwd(self):
- base_url = "https://github.com/jelmer/dulwich"
- path = "/jelmer/dulwich"
- c = HttpGitClient(base_url, username="USERNAME", password="PASSWD")
-
- url = c.get_url(path)
- self.assertEqual("https://github.com/jelmer/dulwich", url)
-
- def test_init_username_passwd_set(self):
- url = "https://github.com/jelmer/dulwich"
-
- c = HttpGitClient(url, config=None, username="user", password="passwd")
- self.assertEqual("user", c._username)
- self.assertEqual("passwd", c._password)
-
- basic_auth = c.pool_manager.headers["authorization"]
- auth_string = "{}:{}".format("user", "passwd")
- b64_credentials = base64.b64encode(auth_string.encode("latin1"))
- expected_basic_auth = "Basic %s" % b64_credentials.decode("latin1")
- self.assertEqual(basic_auth, expected_basic_auth)
-
- def test_init_username_set_no_password(self):
- url = "https://github.com/jelmer/dulwich"
-
- c = HttpGitClient(url, config=None, username="user")
- self.assertEqual("user", c._username)
- self.assertIsNone(c._password)
-
- basic_auth = c.pool_manager.headers["authorization"]
- auth_string = b"user:"
- b64_credentials = base64.b64encode(auth_string)
- expected_basic_auth = f"Basic {b64_credentials.decode('ascii')}"
- self.assertEqual(basic_auth, expected_basic_auth)
-
- def test_init_no_username_passwd(self):
- url = "https://github.com/jelmer/dulwich"
-
- c = HttpGitClient(url, config=None)
- self.assertIs(None, c._username)
- self.assertIs(None, c._password)
- self.assertNotIn("authorization", c.pool_manager.headers)
-
- def test_from_parsedurl_username_only(self):
- username = "user"
- url = f"https://{username}@github.com/jelmer/dulwich"
-
- c = HttpGitClient.from_parsedurl(urlparse(url))
- self.assertEqual(c._username, username)
- self.assertEqual(c._password, None)
-
- basic_auth = c.pool_manager.headers["authorization"]
- auth_string = username.encode("ascii") + b":"
- b64_credentials = base64.b64encode(auth_string)
- expected_basic_auth = f"Basic {b64_credentials.decode('ascii')}"
- self.assertEqual(basic_auth, expected_basic_auth)
-
- def test_from_parsedurl_on_url_with_quoted_credentials(self):
- original_username = "john|the|first"
- quoted_username = urlquote(original_username)
-
- original_password = "Ya#1$2%3"
- quoted_password = urlquote(original_password)
-
- url = f"https://{quoted_username}:{quoted_password}@github.com/jelmer/dulwich"
-
- c = HttpGitClient.from_parsedurl(urlparse(url))
- self.assertEqual(original_username, c._username)
- self.assertEqual(original_password, c._password)
-
- basic_auth = c.pool_manager.headers["authorization"]
- auth_string = f"{original_username}:{original_password}"
- b64_credentials = base64.b64encode(auth_string.encode("latin1"))
- expected_basic_auth = "Basic %s" % b64_credentials.decode("latin1")
- self.assertEqual(basic_auth, expected_basic_auth)
-
- def test_url_redirect_location(self):
- from urllib3.response import HTTPResponse
-
- test_data = {
- "https://gitlab.com/inkscape/inkscape/": {
- "location": "https://gitlab.com/inkscape/inkscape.git/",
- "redirect_url": "https://gitlab.com/inkscape/inkscape.git/",
- "refs_data": (
- b"001e# service=git-upload-pack\n00000032"
- b"fb2bebf4919a011f0fd7cec085443d0031228e76 "
- b"HEAD\n0000"
- ),
- },
- "https://github.com/jelmer/dulwich/": {
- "location": "https://github.com/jelmer/dulwich/",
- "redirect_url": "https://github.com/jelmer/dulwich/",
- "refs_data": (
- b"001e# service=git-upload-pack\n00000032"
- b"3ff25e09724aa4d86ea5bca7d5dd0399a3c8bfcf "
- b"HEAD\n0000"
- ),
- },
- # check for absolute-path URI reference as location
- "https://codeberg.org/ashwinvis/radicale-sh.git/": {
- "location": "/ashwinvis/radicale-auth-sh/",
- "redirect_url": "https://codeberg.org/ashwinvis/radicale-auth-sh/",
- "refs_data": (
- b"001e# service=git-upload-pack\n00000032"
- b"470f8603768b608fc988675de2fae8f963c21158 "
- b"HEAD\n0000"
- ),
- },
- }
-
- tail = "info/refs?service=git-upload-pack"
-
- # we need to mock urllib3.PoolManager as this test will fail
- # otherwise without an active internet connection
- class PoolManagerMock:
- def __init__(self) -> None:
- self.headers: Dict[str, str] = {}
-
- def request(
- self,
- method,
- url,
- fields=None,
- headers=None,
- redirect=True,
- preload_content=True,
- ):
- base_url = url[: -len(tail)]
- redirect_base_url = test_data[base_url]["location"]
- redirect_url = redirect_base_url + tail
- headers = {
- "Content-Type": "application/x-git-upload-pack-advertisement"
- }
- body = test_data[base_url]["refs_data"]
- # urllib3 handles automatic redirection by default
- status = 200
- request_url = redirect_url
- # simulate urllib3 behavior when redirect parameter is False
- if redirect is False:
- request_url = url
- if redirect_base_url != base_url:
- body = b""
- headers["location"] = test_data[base_url]["location"]
- status = 301
-
- return HTTPResponse(
- body=BytesIO(body),
- headers=headers,
- request_method=method,
- request_url=request_url,
- preload_content=preload_content,
- status=status,
- )
-
- pool_manager = PoolManagerMock()
-
- for base_url in test_data.keys():
- # instantiate HttpGitClient with mocked pool manager
- c = HttpGitClient(base_url, pool_manager=pool_manager, config=None)
- # call method that detects url redirection
- _, _, processed_url = c._discover_references(b"git-upload-pack", base_url)
-
- # send the same request as the method above without redirection
- resp = c.pool_manager.request("GET", base_url + tail, redirect=False)
-
- # check expected behavior of urllib3
- redirect_location = resp.get_redirect_location()
-
- if resp.status == 200:
- self.assertFalse(redirect_location)
-
- if redirect_location:
- # check that url redirection has been correctly detected
- self.assertEqual(processed_url, test_data[base_url]["redirect_url"])
- else:
- # check also the no redirection case
- self.assertEqual(processed_url, base_url)
-
- def test_smart_request_content_type_with_directive_check(self):
- from urllib3.response import HTTPResponse
-
- # we need to mock urllib3.PoolManager as this test will fail
- # otherwise without an active internet connection
- class PoolManagerMock:
- def __init__(self) -> None:
- self.headers: Dict[str, str] = {}
-
- def request(
- self,
- method,
- url,
- fields=None,
- headers=None,
- redirect=True,
- preload_content=True,
- ):
- return HTTPResponse(
- headers={
- "Content-Type": "application/x-git-upload-pack-result; charset=utf-8"
- },
- request_method=method,
- request_url=url,
- preload_content=preload_content,
- status=200,
- )
-
- clone_url = "https://hacktivis.me/git/blog.git/"
- client = HttpGitClient(clone_url, pool_manager=PoolManagerMock(), config=None)
- self.assertTrue(client._smart_request("git-upload-pack", clone_url, data=None))
-
-
-class TCPGitClientTests(TestCase):
- def test_get_url(self):
- host = "github.com"
- path = "/jelmer/dulwich"
- c = TCPGitClient(host)
-
- url = c.get_url(path)
- self.assertEqual("git://github.com/jelmer/dulwich", url)
-
- def test_get_url_with_port(self):
- host = "github.com"
- path = "/jelmer/dulwich"
- port = 9090
- c = TCPGitClient(host, port=port)
-
- url = c.get_url(path)
- self.assertEqual("git://github.com:9090/jelmer/dulwich", url)
-
-
-class DefaultUrllib3ManagerTest(TestCase):
- def test_no_config(self):
- manager = default_urllib3_manager(config=None)
- self.assertEqual(manager.connection_pool_kw["cert_reqs"], "CERT_REQUIRED")
-
- def test_config_no_proxy(self):
- import urllib3
-
- manager = default_urllib3_manager(config=ConfigDict())
- self.assertNotIsInstance(manager, urllib3.ProxyManager)
- self.assertIsInstance(manager, urllib3.PoolManager)
-
- def test_config_no_proxy_custom_cls(self):
- import urllib3
-
- class CustomPoolManager(urllib3.PoolManager):
- pass
-
- manager = default_urllib3_manager(
- config=ConfigDict(), pool_manager_cls=CustomPoolManager
- )
- self.assertIsInstance(manager, CustomPoolManager)
-
- def test_config_ssl(self):
- config = ConfigDict()
- config.set(b"http", b"sslVerify", b"true")
- manager = default_urllib3_manager(config=config)
- self.assertEqual(manager.connection_pool_kw["cert_reqs"], "CERT_REQUIRED")
-
- def test_config_no_ssl(self):
- config = ConfigDict()
- config.set(b"http", b"sslVerify", b"false")
- manager = default_urllib3_manager(config=config)
- self.assertEqual(manager.connection_pool_kw["cert_reqs"], "CERT_NONE")
-
- def test_config_proxy(self):
- import urllib3
-
- config = ConfigDict()
- config.set(b"http", b"proxy", b"http://localhost:3128/")
- manager = default_urllib3_manager(config=config)
-
- self.assertIsInstance(manager, urllib3.ProxyManager)
- self.assertTrue(hasattr(manager, "proxy"))
- self.assertEqual(manager.proxy.scheme, "http")
- self.assertEqual(manager.proxy.host, "localhost")
- self.assertEqual(manager.proxy.port, 3128)
-
- def test_environment_proxy(self):
- import urllib3
-
- config = ConfigDict()
- self.overrideEnv("http_proxy", "http://myproxy:8080")
- manager = default_urllib3_manager(config=config)
- self.assertIsInstance(manager, urllib3.ProxyManager)
- self.assertTrue(hasattr(manager, "proxy"))
- self.assertEqual(manager.proxy.scheme, "http")
- self.assertEqual(manager.proxy.host, "myproxy")
- self.assertEqual(manager.proxy.port, 8080)
-
- def test_environment_empty_proxy(self):
- import urllib3
-
- config = ConfigDict()
- self.overrideEnv("http_proxy", "")
- manager = default_urllib3_manager(config=config)
- self.assertNotIsInstance(manager, urllib3.ProxyManager)
- self.assertIsInstance(manager, urllib3.PoolManager)
-
- def test_environment_no_proxy_1(self):
- import urllib3
-
- config = ConfigDict()
- self.overrideEnv("http_proxy", "http://myproxy:8080")
- self.overrideEnv("no_proxy", "xyz,abc.def.gh,abc.gh")
- base_url = "http://xyz.abc.def.gh:8080/path/port"
- manager = default_urllib3_manager(config=config, base_url=base_url)
- self.assertNotIsInstance(manager, urllib3.ProxyManager)
- self.assertIsInstance(manager, urllib3.PoolManager)
-
- def test_environment_no_proxy_2(self):
- import urllib3
-
- config = ConfigDict()
- self.overrideEnv("http_proxy", "http://myproxy:8080")
- self.overrideEnv("no_proxy", "xyz,abc.def.gh,abc.gh,ample.com")
- base_url = "http://ample.com/path/port"
- manager = default_urllib3_manager(config=config, base_url=base_url)
- self.assertNotIsInstance(manager, urllib3.ProxyManager)
- self.assertIsInstance(manager, urllib3.PoolManager)
-
- def test_environment_no_proxy_3(self):
- import urllib3
-
- config = ConfigDict()
- self.overrideEnv("http_proxy", "http://myproxy:8080")
- self.overrideEnv("no_proxy", "xyz,abc.def.gh,abc.gh,ample.com")
- base_url = "http://ample.com:80/path/port"
- manager = default_urllib3_manager(config=config, base_url=base_url)
- self.assertNotIsInstance(manager, urllib3.ProxyManager)
- self.assertIsInstance(manager, urllib3.PoolManager)
-
- def test_environment_no_proxy_4(self):
- import urllib3
-
- config = ConfigDict()
- self.overrideEnv("http_proxy", "http://myproxy:8080")
- self.overrideEnv("no_proxy", "xyz,abc.def.gh,abc.gh,ample.com")
- base_url = "http://www.ample.com/path/port"
- manager = default_urllib3_manager(config=config, base_url=base_url)
- self.assertNotIsInstance(manager, urllib3.ProxyManager)
- self.assertIsInstance(manager, urllib3.PoolManager)
-
- def test_environment_no_proxy_5(self):
- import urllib3
-
- config = ConfigDict()
- self.overrideEnv("http_proxy", "http://myproxy:8080")
- self.overrideEnv("no_proxy", "xyz,abc.def.gh,abc.gh,ample.com")
- base_url = "http://www.example.com/path/port"
- manager = default_urllib3_manager(config=config, base_url=base_url)
- self.assertIsInstance(manager, urllib3.ProxyManager)
- self.assertTrue(hasattr(manager, "proxy"))
- self.assertEqual(manager.proxy.scheme, "http")
- self.assertEqual(manager.proxy.host, "myproxy")
- self.assertEqual(manager.proxy.port, 8080)
-
- def test_environment_no_proxy_6(self):
- import urllib3
-
- config = ConfigDict()
- self.overrideEnv("http_proxy", "http://myproxy:8080")
- self.overrideEnv("no_proxy", "xyz,abc.def.gh,abc.gh,ample.com")
- base_url = "http://ample.com.org/path/port"
- manager = default_urllib3_manager(config=config, base_url=base_url)
- self.assertIsInstance(manager, urllib3.ProxyManager)
- self.assertTrue(hasattr(manager, "proxy"))
- self.assertEqual(manager.proxy.scheme, "http")
- self.assertEqual(manager.proxy.host, "myproxy")
- self.assertEqual(manager.proxy.port, 8080)
-
- def test_environment_no_proxy_ipv4_address_1(self):
- import urllib3
-
- config = ConfigDict()
- self.overrideEnv("http_proxy", "http://myproxy:8080")
- self.overrideEnv("no_proxy", "xyz,abc.def.gh,192.168.0.10,ample.com")
- base_url = "http://192.168.0.10/path/port"
- manager = default_urllib3_manager(config=config, base_url=base_url)
- self.assertNotIsInstance(manager, urllib3.ProxyManager)
- self.assertIsInstance(manager, urllib3.PoolManager)
-
- def test_environment_no_proxy_ipv4_address_2(self):
- import urllib3
-
- config = ConfigDict()
- self.overrideEnv("http_proxy", "http://myproxy:8080")
- self.overrideEnv("no_proxy", "xyz,abc.def.gh,192.168.0.10,ample.com")
- base_url = "http://192.168.0.10:8888/path/port"
- manager = default_urllib3_manager(config=config, base_url=base_url)
- self.assertNotIsInstance(manager, urllib3.ProxyManager)
- self.assertIsInstance(manager, urllib3.PoolManager)
-
- def test_environment_no_proxy_ipv4_address_3(self):
- import urllib3
-
- config = ConfigDict()
- self.overrideEnv("http_proxy", "http://myproxy:8080")
- self.overrideEnv(
- "no_proxy", "xyz,abc.def.gh,ff80:1::/64,192.168.0.0/24,ample.com"
- )
- base_url = "http://192.168.0.10/path/port"
- manager = default_urllib3_manager(config=config, base_url=base_url)
- self.assertNotIsInstance(manager, urllib3.ProxyManager)
- self.assertIsInstance(manager, urllib3.PoolManager)
-
- def test_environment_no_proxy_ipv6_address_1(self):
- import urllib3
-
- config = ConfigDict()
- self.overrideEnv("http_proxy", "http://myproxy:8080")
- self.overrideEnv("no_proxy", "xyz,abc.def.gh,ff80:1::affe,ample.com")
- base_url = "http://[ff80:1::affe]/path/port"
- manager = default_urllib3_manager(config=config, base_url=base_url)
- self.assertNotIsInstance(manager, urllib3.ProxyManager)
- self.assertIsInstance(manager, urllib3.PoolManager)
-
- def test_environment_no_proxy_ipv6_address_2(self):
- import urllib3
-
- config = ConfigDict()
- self.overrideEnv("http_proxy", "http://myproxy:8080")
- self.overrideEnv("no_proxy", "xyz,abc.def.gh,ff80:1::affe,ample.com")
- base_url = "http://[ff80:1::affe]:1234/path/port"
- manager = default_urllib3_manager(config=config, base_url=base_url)
- self.assertNotIsInstance(manager, urllib3.ProxyManager)
- self.assertIsInstance(manager, urllib3.PoolManager)
-
- def test_environment_no_proxy_ipv6_address_3(self):
- import urllib3
-
- config = ConfigDict()
- self.overrideEnv("http_proxy", "http://myproxy:8080")
- self.overrideEnv(
- "no_proxy", "xyz,abc.def.gh,192.168.0.0/24,ff80:1::/64,ample.com"
- )
- base_url = "http://[ff80:1::affe]/path/port"
- manager = default_urllib3_manager(config=config, base_url=base_url)
- self.assertNotIsInstance(manager, urllib3.ProxyManager)
- self.assertIsInstance(manager, urllib3.PoolManager)
-
- def test_config_proxy_custom_cls(self):
- import urllib3
-
- class CustomProxyManager(urllib3.ProxyManager):
- pass
-
- config = ConfigDict()
- config.set(b"http", b"proxy", b"http://localhost:3128/")
- manager = default_urllib3_manager(
- config=config, proxy_manager_cls=CustomProxyManager
- )
- self.assertIsInstance(manager, CustomProxyManager)
-
- def test_config_proxy_creds(self):
- import urllib3
-
- config = ConfigDict()
- config.set(b"http", b"proxy", b"http://jelmer:example@localhost:3128/")
- manager = default_urllib3_manager(config=config)
- assert isinstance(manager, urllib3.ProxyManager)
- self.assertEqual(
- manager.proxy_headers, {"proxy-authorization": "Basic amVsbWVyOmV4YW1wbGU="}
- )
-
- def test_config_no_verify_ssl(self):
- manager = default_urllib3_manager(config=None, cert_reqs="CERT_NONE")
- self.assertEqual(manager.connection_pool_kw["cert_reqs"], "CERT_NONE")
-
-
-class SubprocessSSHVendorTests(TestCase):
- def setUp(self):
- # Monkey Patch client subprocess popen
- self._orig_popen = dulwich.client.subprocess.Popen
- dulwich.client.subprocess.Popen = DummyPopen
-
- def tearDown(self):
- dulwich.client.subprocess.Popen = self._orig_popen
-
- def test_run_command_dashes(self):
- vendor = SubprocessSSHVendor()
- self.assertRaises(
- StrangeHostname,
- vendor.run_command,
- "--weird-host",
- "git-clone-url",
- )
-
- def test_run_command_password(self):
- vendor = SubprocessSSHVendor()
- self.assertRaises(
- NotImplementedError,
- vendor.run_command,
- "host",
- "git-clone-url",
- password="12345",
- )
-
- def test_run_command_password_and_privkey(self):
- vendor = SubprocessSSHVendor()
- self.assertRaises(
- NotImplementedError,
- vendor.run_command,
- "host",
- "git-clone-url",
- password="12345",
- key_filename="/tmp/id_rsa",
- )
-
- def test_run_command_with_port_username_and_privkey(self):
- expected = [
- "ssh",
- "-x",
- "-p",
- "2200",
- "-i",
- "/tmp/id_rsa",
- "user@host",
- "git-clone-url",
- ]
-
- vendor = SubprocessSSHVendor()
- command = vendor.run_command(
- "host",
- "git-clone-url",
- username="user",
- port="2200",
- key_filename="/tmp/id_rsa",
- )
-
- args = command.proc.args
-
- self.assertListEqual(expected, args[0])
-
- def test_run_with_ssh_command(self):
- expected = [
- "/path/to/ssh",
- "-o",
- "Option=Value",
- "-x",
- "host",
- "git-clone-url",
- ]
-
- vendor = SubprocessSSHVendor()
- command = vendor.run_command(
- "host",
- "git-clone-url",
- ssh_command="/path/to/ssh -o Option=Value",
- )
-
- args = command.proc.args
- self.assertListEqual(expected, args[0])
-
-
-class PLinkSSHVendorTests(TestCase):
- def setUp(self):
- # Monkey Patch client subprocess popen
- self._orig_popen = dulwich.client.subprocess.Popen
- dulwich.client.subprocess.Popen = DummyPopen
-
- def tearDown(self):
- dulwich.client.subprocess.Popen = self._orig_popen
-
- def test_run_command_dashes(self):
- vendor = PLinkSSHVendor()
- self.assertRaises(
- StrangeHostname,
- vendor.run_command,
- "--weird-host",
- "git-clone-url",
- )
-
- def test_run_command_password_and_privkey(self):
- vendor = PLinkSSHVendor()
-
- warnings.simplefilter("always", UserWarning)
- self.addCleanup(warnings.resetwarnings)
- warnings_list, restore_warnings = setup_warning_catcher()
- self.addCleanup(restore_warnings)
-
- command = vendor.run_command(
- "host",
- "git-clone-url",
- password="12345",
- key_filename="/tmp/id_rsa",
- )
-
- expected_warning = UserWarning(
- "Invoking PLink with a password exposes the password in the "
- "process list."
- )
-
- for w in warnings_list:
- if type(w) is type(expected_warning) and w.args == expected_warning.args:
- break
- else:
- raise AssertionError(
- f"Expected warning {expected_warning!r} not in {warnings_list!r}"
- )
-
- args = command.proc.args
-
- if sys.platform == "win32":
- binary = ["plink.exe", "-ssh"]
- else:
- binary = ["plink", "-ssh"]
- expected = [
- *binary,
- "-pw",
- "12345",
- "-i",
- "/tmp/id_rsa",
- "host",
- "git-clone-url",
- ]
- self.assertListEqual(expected, args[0])
-
- def test_run_command_password(self):
- if sys.platform == "win32":
- binary = ["plink.exe", "-ssh"]
- else:
- binary = ["plink", "-ssh"]
- expected = [*binary, "-pw", "12345", "host", "git-clone-url"]
-
- vendor = PLinkSSHVendor()
-
- warnings.simplefilter("always", UserWarning)
- self.addCleanup(warnings.resetwarnings)
- warnings_list, restore_warnings = setup_warning_catcher()
- self.addCleanup(restore_warnings)
-
- command = vendor.run_command("host", "git-clone-url", password="12345")
-
- expected_warning = UserWarning(
- "Invoking PLink with a password exposes the password in the "
- "process list."
- )
-
- for w in warnings_list:
- if type(w) is type(expected_warning) and w.args == expected_warning.args:
- break
- else:
- raise AssertionError(
- f"Expected warning {expected_warning!r} not in {warnings_list!r}"
- )
-
- args = command.proc.args
-
- self.assertListEqual(expected, args[0])
-
- def test_run_command_with_port_username_and_privkey(self):
- if sys.platform == "win32":
- binary = ["plink.exe", "-ssh"]
- else:
- binary = ["plink", "-ssh"]
- expected = [
- *binary,
- "-P",
- "2200",
- "-i",
- "/tmp/id_rsa",
- "user@host",
- "git-clone-url",
- ]
-
- vendor = PLinkSSHVendor()
- command = vendor.run_command(
- "host",
- "git-clone-url",
- username="user",
- port="2200",
- key_filename="/tmp/id_rsa",
- )
-
- args = command.proc.args
-
- self.assertListEqual(expected, args[0])
-
- def test_run_with_ssh_command(self):
- expected = [
- "/path/to/plink",
- "-x",
- "host",
- "git-clone-url",
- ]
-
- vendor = SubprocessSSHVendor()
- command = vendor.run_command(
- "host",
- "git-clone-url",
- ssh_command="/path/to/plink",
- )
-
- args = command.proc.args
- self.assertListEqual(expected, args[0])
-
-
-class RsyncUrlTests(TestCase):
- def test_simple(self):
- self.assertEqual(parse_rsync_url("foo:bar/path"), (None, "foo", "bar/path"))
- self.assertEqual(
- parse_rsync_url("user@foo:bar/path"), ("user", "foo", "bar/path")
- )
-
- def test_path(self):
- self.assertRaises(ValueError, parse_rsync_url, "/path")
-
-
-class CheckWantsTests(TestCase):
- def test_fine(self):
- check_wants(
- [b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"],
- {b"refs/heads/blah": b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"},
- )
-
- def test_missing(self):
- self.assertRaises(
- InvalidWants,
- check_wants,
- [b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"],
- {b"refs/heads/blah": b"3f3dc7a53fb752a6961d3a56683df46d4d3bf262"},
- )
-
- def test_annotated(self):
- self.assertRaises(
- InvalidWants,
- check_wants,
- [b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"],
- {
- b"refs/heads/blah": b"3f3dc7a53fb752a6961d3a56683df46d4d3bf262",
- b"refs/heads/blah^{}": b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262",
- },
- )
-
-
-class FetchPackResultTests(TestCase):
- def test_eq(self):
- self.assertEqual(
- FetchPackResult(
- {b"refs/heads/master": b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"},
- {},
- b"user/agent",
- ),
- FetchPackResult(
- {b"refs/heads/master": b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"},
- {},
- b"user/agent",
- ),
- )
-
-
-class GitCredentialStoreTests(TestCase):
- @classmethod
- def setUpClass(cls):
- with tempfile.NamedTemporaryFile(delete=False) as f:
- f.write(b"https://user:pass@example.org\n")
- cls.fname = f.name
-
- @classmethod
- def tearDownClass(cls):
- os.unlink(cls.fname)
-
- def test_nonmatching_scheme(self):
- self.assertEqual(
- get_credentials_from_store(b"http", b"example.org", fnames=[self.fname]),
- None,
- )
-
- def test_nonmatching_hostname(self):
- self.assertEqual(
- get_credentials_from_store(b"https", b"noentry.org", fnames=[self.fname]),
- None,
- )
-
- def test_match_without_username(self):
- self.assertEqual(
- get_credentials_from_store(b"https", b"example.org", fnames=[self.fname]),
- (b"user", b"pass"),
- )
-
- def test_match_with_matching_username(self):
- self.assertEqual(
- get_credentials_from_store(
- b"https", b"example.org", b"user", fnames=[self.fname]
- ),
- (b"user", b"pass"),
- )
-
- def test_no_match_with_nonmatching_username(self):
- self.assertEqual(
- get_credentials_from_store(
- b"https", b"example.org", b"otheruser", fnames=[self.fname]
- ),
- None,
- )
-
-
-class RemoteErrorFromStderrTests(TestCase):
- def test_nothing(self):
- self.assertEqual(_remote_error_from_stderr(None), HangupException())
-
- def test_error_line(self):
- b = BytesIO(
- b"""\
-This is some random output.
-ERROR: This is the actual error
-with a tail
-"""
- )
- self.assertEqual(
- _remote_error_from_stderr(b),
- GitProtocolError("This is the actual error"),
- )
-
- def test_no_error_line(self):
- b = BytesIO(
- b"""\
-This is output without an error line.
-And this line is just random noise, too.
-"""
- )
- self.assertEqual(
- _remote_error_from_stderr(b),
- HangupException(
- [
- b"This is output without an error line.",
- b"And this line is just random noise, too.",
- ]
- ),
- )
blob - 7857bb134cbe6f5e9951c618169b06c470fd53e5 (mode 644)
blob + /dev/null
--- dulwich/tests/test_config.py
+++ /dev/null
-# test_config.py -- Tests for reading and writing configuration files
-# Copyright (C) 2011 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for reading and writing configuration files."""
-
-import os
-import sys
-from io import BytesIO
-from unittest import skipIf
-from unittest.mock import patch
-
-from dulwich.tests import TestCase
-
-from ..config import (
- ConfigDict,
- ConfigFile,
- StackedConfig,
- _check_section_name,
- _check_variable_name,
- _escape_value,
- _format_string,
- _parse_string,
- apply_instead_of,
- parse_submodules,
-)
-
-
-class ConfigFileTests(TestCase):
- def from_file(self, text):
- return ConfigFile.from_file(BytesIO(text))
-
- def test_empty(self):
- ConfigFile()
-
- def test_eq(self):
- self.assertEqual(ConfigFile(), ConfigFile())
-
- def test_default_config(self):
- cf = self.from_file(
- b"""[core]
-\trepositoryformatversion = 0
-\tfilemode = true
-\tbare = false
-\tlogallrefupdates = true
-"""
- )
- self.assertEqual(
- ConfigFile(
- {
- (b"core",): {
- b"repositoryformatversion": b"0",
- b"filemode": b"true",
- b"bare": b"false",
- b"logallrefupdates": b"true",
- }
- }
- ),
- cf,
- )
-
- def test_from_file_empty(self):
- cf = self.from_file(b"")
- self.assertEqual(ConfigFile(), cf)
-
- def test_empty_line_before_section(self):
- cf = self.from_file(b"\n[section]\n")
- self.assertEqual(ConfigFile({(b"section",): {}}), cf)
-
- def test_comment_before_section(self):
- cf = self.from_file(b"# foo\n[section]\n")
- self.assertEqual(ConfigFile({(b"section",): {}}), cf)
-
- def test_comment_after_section(self):
- cf = self.from_file(b"[section] # foo\n")
- self.assertEqual(ConfigFile({(b"section",): {}}), cf)
-
- def test_comment_after_variable(self):
- cf = self.from_file(b"[section]\nbar= foo # a comment\n")
- self.assertEqual(ConfigFile({(b"section",): {b"bar": b"foo"}}), cf)
-
- def test_comment_character_within_value_string(self):
- cf = self.from_file(b'[section]\nbar= "foo#bar"\n')
- self.assertEqual(ConfigFile({(b"section",): {b"bar": b"foo#bar"}}), cf)
-
- def test_comment_character_within_section_string(self):
- cf = self.from_file(b'[branch "foo#bar"] # a comment\nbar= foo\n')
- self.assertEqual(ConfigFile({(b"branch", b"foo#bar"): {b"bar": b"foo"}}), cf)
-
- def test_closing_bracket_within_section_string(self):
- cf = self.from_file(b'[branch "foo]bar"] # a comment\nbar= foo\n')
- self.assertEqual(ConfigFile({(b"branch", b"foo]bar"): {b"bar": b"foo"}}), cf)
-
- def test_from_file_section(self):
- cf = self.from_file(b"[core]\nfoo = bar\n")
- self.assertEqual(b"bar", cf.get((b"core",), b"foo"))
- self.assertEqual(b"bar", cf.get((b"core", b"foo"), b"foo"))
-
- def test_from_file_multiple(self):
- cf = self.from_file(b"[core]\nfoo = bar\nfoo = blah\n")
- self.assertEqual([b"bar", b"blah"], list(cf.get_multivar((b"core",), b"foo")))
- self.assertEqual([], list(cf.get_multivar((b"core",), b"blah")))
-
- def test_from_file_utf8_bom(self):
- text = "[core]\nfoo = b\u00e4r\n".encode("utf-8-sig")
- cf = self.from_file(text)
- self.assertEqual(b"b\xc3\xa4r", cf.get((b"core",), b"foo"))
-
- def test_from_file_section_case_insensitive_lower(self):
- cf = self.from_file(b"[cOre]\nfOo = bar\n")
- self.assertEqual(b"bar", cf.get((b"core",), b"foo"))
- self.assertEqual(b"bar", cf.get((b"core", b"foo"), b"foo"))
-
- def test_from_file_section_case_insensitive_mixed(self):
- cf = self.from_file(b"[cOre]\nfOo = bar\n")
- self.assertEqual(b"bar", cf.get((b"core",), b"fOo"))
- self.assertEqual(b"bar", cf.get((b"cOre", b"fOo"), b"fOo"))
-
- def test_from_file_with_mixed_quoted(self):
- cf = self.from_file(b'[core]\nfoo = "bar"la\n')
- self.assertEqual(b"barla", cf.get((b"core",), b"foo"))
-
- def test_from_file_section_with_open_brackets(self):
- self.assertRaises(ValueError, self.from_file, b"[core\nfoo = bar\n")
-
- def test_from_file_value_with_open_quoted(self):
- self.assertRaises(ValueError, self.from_file, b'[core]\nfoo = "bar\n')
-
- def test_from_file_with_quotes(self):
- cf = self.from_file(b"[core]\n" b'foo = " bar"\n')
- self.assertEqual(b" bar", cf.get((b"core",), b"foo"))
-
- def test_from_file_with_interrupted_line(self):
- cf = self.from_file(b"[core]\n" b"foo = bar\\\n" b" la\n")
- self.assertEqual(b"barla", cf.get((b"core",), b"foo"))
-
- def test_from_file_with_boolean_setting(self):
- cf = self.from_file(b"[core]\n" b"foo\n")
- self.assertEqual(b"true", cf.get((b"core",), b"foo"))
-
- def test_from_file_subsection(self):
- cf = self.from_file(b'[branch "foo"]\nfoo = bar\n')
- self.assertEqual(b"bar", cf.get((b"branch", b"foo"), b"foo"))
-
- def test_from_file_subsection_invalid(self):
- self.assertRaises(ValueError, self.from_file, b'[branch "foo]\nfoo = bar\n')
-
- def test_from_file_subsection_not_quoted(self):
- cf = self.from_file(b"[branch.foo]\nfoo = bar\n")
- self.assertEqual(b"bar", cf.get((b"branch", b"foo"), b"foo"))
-
- def test_write_preserve_multivar(self):
- cf = self.from_file(b"[core]\nfoo = bar\nfoo = blah\n")
- f = BytesIO()
- cf.write_to_file(f)
- self.assertEqual(b"[core]\n\tfoo = bar\n\tfoo = blah\n", f.getvalue())
-
- def test_write_to_file_empty(self):
- c = ConfigFile()
- f = BytesIO()
- c.write_to_file(f)
- self.assertEqual(b"", f.getvalue())
-
- def test_write_to_file_section(self):
- c = ConfigFile()
- c.set((b"core",), b"foo", b"bar")
- f = BytesIO()
- c.write_to_file(f)
- self.assertEqual(b"[core]\n\tfoo = bar\n", f.getvalue())
-
- def test_write_to_file_subsection(self):
- c = ConfigFile()
- c.set((b"branch", b"blie"), b"foo", b"bar")
- f = BytesIO()
- c.write_to_file(f)
- self.assertEqual(b'[branch "blie"]\n\tfoo = bar\n', f.getvalue())
-
- def test_same_line(self):
- cf = self.from_file(b"[branch.foo] foo = bar\n")
- self.assertEqual(b"bar", cf.get((b"branch", b"foo"), b"foo"))
-
- def test_quoted_newlines_windows(self):
- cf = self.from_file(
- b"[alias]\r\n"
- b"c = '!f() { \\\r\n"
- b' printf \'[git commit -m \\"%s\\"]\\n\' \\"$*\\" && \\\r\n'
- b' git commit -m \\"$*\\"; \\\r\n'
- b" }; f'\r\n"
- )
- self.assertEqual(list(cf.sections()), [(b"alias",)])
- self.assertEqual(
- b"'!f() { printf '[git commit -m \"%s\"]\n' " b'"$*" && git commit -m "$*"',
- cf.get((b"alias",), b"c"),
- )
-
- def test_quoted(self):
- cf = self.from_file(
- b"""[gui]
-\tfontdiff = -family \\\"Ubuntu Mono\\\" -size 11 -overstrike 0
-"""
- )
- self.assertEqual(
- ConfigFile(
- {
- (b"gui",): {
- b"fontdiff": b'-family "Ubuntu Mono" -size 11 -overstrike 0',
- }
- }
- ),
- cf,
- )
-
- def test_quoted_multiline(self):
- cf = self.from_file(
- b"""[alias]
-who = \"!who() {\\
- git log --no-merges --pretty=format:'%an - %ae' $@ | uniq -c | sort -rn;\\
-};\\
-who\"
-"""
- )
- self.assertEqual(
- ConfigFile(
- {
- (b"alias",): {
- b"who": (
- b"!who() {git log --no-merges --pretty=format:'%an - "
- b"%ae' $@ | uniq -c | sort -rn;};who"
- )
- }
- }
- ),
- cf,
- )
-
- def test_set_hash_gets_quoted(self):
- c = ConfigFile()
- c.set(b"xandikos", b"color", b"#665544")
- f = BytesIO()
- c.write_to_file(f)
- self.assertEqual(b'[xandikos]\n\tcolor = "#665544"\n', f.getvalue())
-
-
-class ConfigDictTests(TestCase):
- def test_get_set(self):
- cd = ConfigDict()
- self.assertRaises(KeyError, cd.get, b"foo", b"core")
- cd.set((b"core",), b"foo", b"bla")
- self.assertEqual(b"bla", cd.get((b"core",), b"foo"))
- cd.set((b"core",), b"foo", b"bloe")
- self.assertEqual(b"bloe", cd.get((b"core",), b"foo"))
-
- def test_get_boolean(self):
- cd = ConfigDict()
- cd.set((b"core",), b"foo", b"true")
- self.assertTrue(cd.get_boolean((b"core",), b"foo"))
- cd.set((b"core",), b"foo", b"false")
- self.assertFalse(cd.get_boolean((b"core",), b"foo"))
- cd.set((b"core",), b"foo", b"invalid")
- self.assertRaises(ValueError, cd.get_boolean, (b"core",), b"foo")
-
- def test_dict(self):
- cd = ConfigDict()
- cd.set((b"core",), b"foo", b"bla")
- cd.set((b"core2",), b"foo", b"bloe")
-
- self.assertEqual([(b"core",), (b"core2",)], list(cd.keys()))
- self.assertEqual(cd[(b"core",)], {b"foo": b"bla"})
-
- cd[b"a"] = b"b"
- self.assertEqual(cd[b"a"], b"b")
-
- def test_items(self):
- cd = ConfigDict()
- cd.set((b"core",), b"foo", b"bla")
- cd.set((b"core2",), b"foo", b"bloe")
-
- self.assertEqual([(b"foo", b"bla")], list(cd.items((b"core",))))
-
- def test_items_nonexistant(self):
- cd = ConfigDict()
- cd.set((b"core2",), b"foo", b"bloe")
-
- self.assertEqual([], list(cd.items((b"core",))))
-
- def test_sections(self):
- cd = ConfigDict()
- cd.set((b"core2",), b"foo", b"bloe")
-
- self.assertEqual([(b"core2",)], list(cd.sections()))
-
-
-class StackedConfigTests(TestCase):
- def test_default_backends(self):
- StackedConfig.default_backends()
-
- @skipIf(sys.platform != "win32", "Windows specific config location.")
- def test_windows_config_from_path(self):
- from ..config import get_win_system_paths
-
- install_dir = os.path.join("C:", "foo", "Git")
- self.overrideEnv("PATH", os.path.join(install_dir, "cmd"))
- with patch("os.path.exists", return_value=True):
- paths = set(get_win_system_paths())
- self.assertEqual(
- {
- os.path.join(os.environ.get("PROGRAMDATA"), "Git", "config"),
- os.path.join(install_dir, "etc", "gitconfig"),
- },
- paths,
- )
-
- @skipIf(sys.platform != "win32", "Windows specific config location.")
- def test_windows_config_from_reg(self):
- import winreg
-
- from ..config import get_win_system_paths
-
- self.overrideEnv("PATH", None)
- install_dir = os.path.join("C:", "foo", "Git")
- with patch("winreg.OpenKey"):
- with patch(
- "winreg.QueryValueEx",
- return_value=(install_dir, winreg.REG_SZ),
- ):
- paths = set(get_win_system_paths())
- self.assertEqual(
- {
- os.path.join(os.environ.get("PROGRAMDATA"), "Git", "config"),
- os.path.join(install_dir, "etc", "gitconfig"),
- },
- paths,
- )
-
-
-class EscapeValueTests(TestCase):
- def test_nothing(self):
- self.assertEqual(b"foo", _escape_value(b"foo"))
-
- def test_backslash(self):
- self.assertEqual(b"foo\\\\", _escape_value(b"foo\\"))
-
- def test_newline(self):
- self.assertEqual(b"foo\\n", _escape_value(b"foo\n"))
-
-
-class FormatStringTests(TestCase):
- def test_quoted(self):
- self.assertEqual(b'" foo"', _format_string(b" foo"))
- self.assertEqual(b'"\\tfoo"', _format_string(b"\tfoo"))
-
- def test_not_quoted(self):
- self.assertEqual(b"foo", _format_string(b"foo"))
- self.assertEqual(b"foo bar", _format_string(b"foo bar"))
-
-
-class ParseStringTests(TestCase):
- def test_quoted(self):
- self.assertEqual(b" foo", _parse_string(b'" foo"'))
- self.assertEqual(b"\tfoo", _parse_string(b'"\\tfoo"'))
-
- def test_not_quoted(self):
- self.assertEqual(b"foo", _parse_string(b"foo"))
- self.assertEqual(b"foo bar", _parse_string(b"foo bar"))
-
- def test_nothing(self):
- self.assertEqual(b"", _parse_string(b""))
-
- def test_tab(self):
- self.assertEqual(b"\tbar\t", _parse_string(b"\\tbar\\t"))
-
- def test_newline(self):
- self.assertEqual(b"\nbar\t", _parse_string(b"\\nbar\\t\t"))
-
- def test_quote(self):
- self.assertEqual(b'"foo"', _parse_string(b'\\"foo\\"'))
-
-
-class CheckVariableNameTests(TestCase):
- def test_invalid(self):
- self.assertFalse(_check_variable_name(b"foo "))
- self.assertFalse(_check_variable_name(b"bar,bar"))
- self.assertFalse(_check_variable_name(b"bar.bar"))
-
- def test_valid(self):
- self.assertTrue(_check_variable_name(b"FOO"))
- self.assertTrue(_check_variable_name(b"foo"))
- self.assertTrue(_check_variable_name(b"foo-bar"))
-
-
-class CheckSectionNameTests(TestCase):
- def test_invalid(self):
- self.assertFalse(_check_section_name(b"foo "))
- self.assertFalse(_check_section_name(b"bar,bar"))
-
- def test_valid(self):
- self.assertTrue(_check_section_name(b"FOO"))
- self.assertTrue(_check_section_name(b"foo"))
- self.assertTrue(_check_section_name(b"foo-bar"))
- self.assertTrue(_check_section_name(b"bar.bar"))
-
-
-class SubmodulesTests(TestCase):
- def testSubmodules(self):
- cf = ConfigFile.from_file(
- BytesIO(
- b"""\
-[submodule "core/lib"]
-\tpath = core/lib
-\turl = https://github.com/phhusson/QuasselC.git
-"""
- )
- )
- got = list(parse_submodules(cf))
- self.assertEqual(
- [
- (
- b"core/lib",
- b"https://github.com/phhusson/QuasselC.git",
- b"core/lib",
- )
- ],
- got,
- )
-
- def testMalformedSubmodules(self):
- cf = ConfigFile.from_file(
- BytesIO(
- b"""\
-[submodule "core/lib"]
-\tpath = core/lib
-\turl = https://github.com/phhusson/QuasselC.git
-
-[submodule "dulwich"]
-\turl = https://github.com/jelmer/dulwich
-"""
- )
- )
- got = list(parse_submodules(cf))
- self.assertEqual(
- [
- (
- b"core/lib",
- b"https://github.com/phhusson/QuasselC.git",
- b"core/lib",
- )
- ],
- got,
- )
-
-
-class ApplyInsteadOfTests(TestCase):
- def test_none(self):
- config = ConfigDict()
- self.assertEqual(
- "https://example.com/", apply_instead_of(config, "https://example.com/")
- )
-
- def test_apply(self):
- config = ConfigDict()
- config.set(("url", "https://samba.org/"), "insteadOf", "https://example.com/")
- self.assertEqual(
- "https://samba.org/", apply_instead_of(config, "https://example.com/")
- )
-
- def test_apply_multiple(self):
- config = ConfigDict()
- config.set(("url", "https://samba.org/"), "insteadOf", "https://blah.com/")
- config.set(("url", "https://samba.org/"), "insteadOf", "https://example.com/")
- self.assertEqual(
- [b"https://blah.com/", b"https://example.com/"],
- list(config.get_multivar(("url", "https://samba.org/"), "insteadOf")),
- )
- self.assertEqual(
- "https://samba.org/", apply_instead_of(config, "https://example.com/")
- )
blob - 3f06bef8b180b19b246a0e42b444bf011ba63514 (mode 644)
blob + /dev/null
--- dulwich/tests/test_credentials.py
+++ /dev/null
-# test_credentials.py -- tests for credentials.py
-
-# Copyright (C) 2022 Daniele Trifirò <daniele@iterative.ai>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-from urllib.parse import urlparse
-
-from dulwich.tests import TestCase
-
-from ..config import ConfigDict
-from ..credentials import match_partial_url, match_urls, urlmatch_credential_sections
-
-
-class TestCredentialHelpersUtils(TestCase):
- def test_match_urls(self):
- url = urlparse("https://github.com/jelmer/dulwich/")
- url_1 = urlparse("https://github.com/jelmer/dulwich")
- url_2 = urlparse("https://github.com/jelmer")
- url_3 = urlparse("https://github.com")
- self.assertTrue(match_urls(url, url_1))
- self.assertTrue(match_urls(url, url_2))
- self.assertTrue(match_urls(url, url_3))
-
- non_matching = urlparse("https://git.sr.ht/")
- self.assertFalse(match_urls(url, non_matching))
-
- def test_match_partial_url(self):
- url = urlparse("https://github.com/jelmer/dulwich/")
- self.assertTrue(match_partial_url(url, "github.com"))
- self.assertFalse(match_partial_url(url, "github.com/jelmer/"))
- self.assertTrue(match_partial_url(url, "github.com/jelmer/dulwich"))
- self.assertFalse(match_partial_url(url, "github.com/jel"))
- self.assertFalse(match_partial_url(url, "github.com/jel/"))
-
- def test_urlmatch_credential_sections(self):
- config = ConfigDict()
- config.set((b"credential", "https://github.com"), b"helper", "foo")
- config.set((b"credential", "git.sr.ht"), b"helper", "foo")
- config.set(b"credential", b"helper", "bar")
-
- self.assertEqual(
- list(urlmatch_credential_sections(config, "https://github.com")),
- [
- (b"credential", b"https://github.com"),
- (b"credential",),
- ],
- )
-
- self.assertEqual(
- list(urlmatch_credential_sections(config, "https://git.sr.ht")),
- [
- (b"credential", b"git.sr.ht"),
- (b"credential",),
- ],
- )
-
- self.assertEqual(
- list(urlmatch_credential_sections(config, "missing_url")),
- [(b"credential",)],
- )
blob - 63dc8fcd405cebabf0555379f2ae3bc2c600d99a (mode 644)
blob + /dev/null
--- dulwich/tests/test_diff_tree.py
+++ /dev/null
-# test_diff_tree.py -- Tests for file and tree diff utilities.
-# Copyright (C) 2010 Google, Inc.
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for file and tree diff utilities."""
-
-from itertools import permutations
-
-from dulwich.tests import TestCase
-
-from ..diff_tree import (
- CHANGE_COPY,
- CHANGE_MODIFY,
- CHANGE_RENAME,
- CHANGE_UNCHANGED,
- RenameDetector,
- TreeChange,
- _count_blocks,
- _count_blocks_py,
- _is_tree,
- _is_tree_py,
- _merge_entries,
- _merge_entries_py,
- _similarity_score,
- _tree_change_key,
- tree_changes,
- tree_changes_for_merge,
-)
-from ..index import commit_tree
-from ..object_store import MemoryObjectStore
-from ..objects import Blob, ShaFile, Tree, TreeEntry
-from .utils import F, ext_functest_builder, functest_builder, make_object
-
-
-class DiffTestCase(TestCase):
- def setUp(self):
- super().setUp()
- self.store = MemoryObjectStore()
- self.empty_tree = self.commit_tree([])
-
- def commit_tree(self, entries):
- commit_blobs = []
- for entry in entries:
- if len(entry) == 2:
- path, obj = entry
- mode = F
- else:
- path, obj, mode = entry
- if isinstance(obj, Blob):
- self.store.add_object(obj)
- sha = obj.id
- else:
- sha = obj
- commit_blobs.append((path, sha, mode))
- return self.store[commit_tree(self.store, commit_blobs)]
-
-
-class TreeChangesTest(DiffTestCase):
- def setUp(self):
- super().setUp()
- self.detector = RenameDetector(self.store)
-
- def assertMergeFails(self, merge_entries, name, mode, sha):
- t = Tree()
- t[name] = (mode, sha)
- self.assertRaises((TypeError, ValueError), merge_entries, "", t, t)
-
- def _do_test_merge_entries(self, merge_entries):
- blob_a1 = make_object(Blob, data=b"a1")
- blob_a2 = make_object(Blob, data=b"a2")
- blob_b1 = make_object(Blob, data=b"b1")
- blob_c2 = make_object(Blob, data=b"c2")
- tree1 = self.commit_tree([(b"a", blob_a1, 0o100644), (b"b", blob_b1, 0o100755)])
- tree2 = self.commit_tree([(b"a", blob_a2, 0o100644), (b"c", blob_c2, 0o100755)])
-
- self.assertEqual([], merge_entries(b"", self.empty_tree, self.empty_tree))
- self.assertEqual(
- [
- ((None, None, None), (b"a", 0o100644, blob_a1.id)),
- ((None, None, None), (b"b", 0o100755, blob_b1.id)),
- ],
- merge_entries(b"", self.empty_tree, tree1),
- )
- self.assertEqual(
- [
- ((None, None, None), (b"x/a", 0o100644, blob_a1.id)),
- ((None, None, None), (b"x/b", 0o100755, blob_b1.id)),
- ],
- merge_entries(b"x", self.empty_tree, tree1),
- )
-
- self.assertEqual(
- [
- ((b"a", 0o100644, blob_a2.id), (None, None, None)),
- ((b"c", 0o100755, blob_c2.id), (None, None, None)),
- ],
- merge_entries(b"", tree2, self.empty_tree),
- )
-
- self.assertEqual(
- [
- ((b"a", 0o100644, blob_a1.id), (b"a", 0o100644, blob_a2.id)),
- ((b"b", 0o100755, blob_b1.id), (None, None, None)),
- ((None, None, None), (b"c", 0o100755, blob_c2.id)),
- ],
- merge_entries(b"", tree1, tree2),
- )
-
- self.assertEqual(
- [
- ((b"a", 0o100644, blob_a2.id), (b"a", 0o100644, blob_a1.id)),
- ((None, None, None), (b"b", 0o100755, blob_b1.id)),
- ((b"c", 0o100755, blob_c2.id), (None, None, None)),
- ],
- merge_entries(b"", tree2, tree1),
- )
-
- self.assertMergeFails(merge_entries, 0xDEADBEEF, 0o100644, "1" * 40)
- self.assertMergeFails(merge_entries, b"a", b"deadbeef", "1" * 40)
- self.assertMergeFails(merge_entries, b"a", 0o100644, 0xDEADBEEF)
-
- test_merge_entries = functest_builder(_do_test_merge_entries, _merge_entries_py)
- test_merge_entries_extension = ext_functest_builder(
- _do_test_merge_entries, _merge_entries
- )
-
- def _do_test_is_tree(self, is_tree):
- self.assertFalse(is_tree(TreeEntry(None, None, None)))
- self.assertFalse(is_tree(TreeEntry(b"a", 0o100644, b"a" * 40)))
- self.assertFalse(is_tree(TreeEntry(b"a", 0o100755, b"a" * 40)))
- self.assertFalse(is_tree(TreeEntry(b"a", 0o120000, b"a" * 40)))
- self.assertTrue(is_tree(TreeEntry(b"a", 0o040000, b"a" * 40)))
- self.assertRaises(TypeError, is_tree, TreeEntry(b"a", b"x", b"a" * 40))
- self.assertRaises(AttributeError, is_tree, 1234)
-
- test_is_tree = functest_builder(_do_test_is_tree, _is_tree_py)
- test_is_tree_extension = ext_functest_builder(_do_test_is_tree, _is_tree)
-
- def assertChangesEqual(self, expected, tree1, tree2, **kwargs):
- actual = list(tree_changes(self.store, tree1.id, tree2.id, **kwargs))
- self.assertEqual(expected, actual)
-
- # For brevity, the following tests use tuples instead of TreeEntry objects.
-
- def test_tree_changes_empty(self):
- self.assertChangesEqual([], self.empty_tree, self.empty_tree)
-
- def test_tree_changes_no_changes(self):
- blob = make_object(Blob, data=b"blob")
- tree = self.commit_tree([(b"a", blob), (b"b/c", blob)])
- self.assertChangesEqual([], self.empty_tree, self.empty_tree)
- self.assertChangesEqual([], tree, tree)
- self.assertChangesEqual(
- [
- TreeChange(CHANGE_UNCHANGED, (b"a", F, blob.id), (b"a", F, blob.id)),
- TreeChange(
- CHANGE_UNCHANGED,
- (b"b/c", F, blob.id),
- (b"b/c", F, blob.id),
- ),
- ],
- tree,
- tree,
- want_unchanged=True,
- )
-
- def test_tree_changes_add_delete(self):
- blob_a = make_object(Blob, data=b"a")
- blob_b = make_object(Blob, data=b"b")
- tree = self.commit_tree([(b"a", blob_a, 0o100644), (b"x/b", blob_b, 0o100755)])
- self.assertChangesEqual(
- [
- TreeChange.add((b"a", 0o100644, blob_a.id)),
- TreeChange.add((b"x/b", 0o100755, blob_b.id)),
- ],
- self.empty_tree,
- tree,
- )
- self.assertChangesEqual(
- [
- TreeChange.delete((b"a", 0o100644, blob_a.id)),
- TreeChange.delete((b"x/b", 0o100755, blob_b.id)),
- ],
- tree,
- self.empty_tree,
- )
-
- def test_tree_changes_modify_contents(self):
- blob_a1 = make_object(Blob, data=b"a1")
- blob_a2 = make_object(Blob, data=b"a2")
- tree1 = self.commit_tree([(b"a", blob_a1)])
- tree2 = self.commit_tree([(b"a", blob_a2)])
- self.assertChangesEqual(
- [TreeChange(CHANGE_MODIFY, (b"a", F, blob_a1.id), (b"a", F, blob_a2.id))],
- tree1,
- tree2,
- )
-
- def test_tree_changes_modify_mode(self):
- blob_a = make_object(Blob, data=b"a")
- tree1 = self.commit_tree([(b"a", blob_a, 0o100644)])
- tree2 = self.commit_tree([(b"a", blob_a, 0o100755)])
- self.assertChangesEqual(
- [
- TreeChange(
- CHANGE_MODIFY,
- (b"a", 0o100644, blob_a.id),
- (b"a", 0o100755, blob_a.id),
- )
- ],
- tree1,
- tree2,
- )
-
- def test_tree_changes_change_type(self):
- blob_a1 = make_object(Blob, data=b"a")
- blob_a2 = make_object(Blob, data=b"/foo/bar")
- tree1 = self.commit_tree([(b"a", blob_a1, 0o100644)])
- tree2 = self.commit_tree([(b"a", blob_a2, 0o120000)])
- self.assertChangesEqual(
- [
- TreeChange.delete((b"a", 0o100644, blob_a1.id)),
- TreeChange.add((b"a", 0o120000, blob_a2.id)),
- ],
- tree1,
- tree2,
- )
-
- def test_tree_changes_change_type_same(self):
- blob_a1 = make_object(Blob, data=b"a")
- blob_a2 = make_object(Blob, data=b"/foo/bar")
- tree1 = self.commit_tree([(b"a", blob_a1, 0o100644)])
- tree2 = self.commit_tree([(b"a", blob_a2, 0o120000)])
- self.assertChangesEqual(
- [
- TreeChange(
- CHANGE_MODIFY,
- (b"a", 0o100644, blob_a1.id),
- (b"a", 0o120000, blob_a2.id),
- )
- ],
- tree1,
- tree2,
- change_type_same=True,
- )
-
- def test_tree_changes_to_tree(self):
- blob_a = make_object(Blob, data=b"a")
- blob_x = make_object(Blob, data=b"x")
- tree1 = self.commit_tree([(b"a", blob_a)])
- tree2 = self.commit_tree([(b"a/x", blob_x)])
- self.assertChangesEqual(
- [
- TreeChange.delete((b"a", F, blob_a.id)),
- TreeChange.add((b"a/x", F, blob_x.id)),
- ],
- tree1,
- tree2,
- )
-
- def test_tree_changes_complex(self):
- blob_a_1 = make_object(Blob, data=b"a1_1")
- blob_bx1_1 = make_object(Blob, data=b"bx1_1")
- blob_bx2_1 = make_object(Blob, data=b"bx2_1")
- blob_by1_1 = make_object(Blob, data=b"by1_1")
- blob_by2_1 = make_object(Blob, data=b"by2_1")
- tree1 = self.commit_tree(
- [
- (b"a", blob_a_1),
- (b"b/x/1", blob_bx1_1),
- (b"b/x/2", blob_bx2_1),
- (b"b/y/1", blob_by1_1),
- (b"b/y/2", blob_by2_1),
- ]
- )
-
- blob_a_2 = make_object(Blob, data=b"a1_2")
- blob_bx1_2 = blob_bx1_1
- blob_by_2 = make_object(Blob, data=b"by_2")
- blob_c_2 = make_object(Blob, data=b"c_2")
- tree2 = self.commit_tree(
- [
- (b"a", blob_a_2),
- (b"b/x/1", blob_bx1_2),
- (b"b/y", blob_by_2),
- (b"c", blob_c_2),
- ]
- )
-
- self.assertChangesEqual(
- [
- TreeChange(
- CHANGE_MODIFY,
- (b"a", F, blob_a_1.id),
- (b"a", F, blob_a_2.id),
- ),
- TreeChange.delete((b"b/x/2", F, blob_bx2_1.id)),
- TreeChange.add((b"b/y", F, blob_by_2.id)),
- TreeChange.delete((b"b/y/1", F, blob_by1_1.id)),
- TreeChange.delete((b"b/y/2", F, blob_by2_1.id)),
- TreeChange.add((b"c", F, blob_c_2.id)),
- ],
- tree1,
- tree2,
- )
-
- def test_tree_changes_name_order(self):
- blob = make_object(Blob, data=b"a")
- tree1 = self.commit_tree([(b"a", blob), (b"a.", blob), (b"a..", blob)])
- # Tree order is the reverse of this, so if we used tree order, 'a..'
- # would not be merged.
- tree2 = self.commit_tree([(b"a/x", blob), (b"a./x", blob), (b"a..", blob)])
-
- self.assertChangesEqual(
- [
- TreeChange.delete((b"a", F, blob.id)),
- TreeChange.add((b"a/x", F, blob.id)),
- TreeChange.delete((b"a.", F, blob.id)),
- TreeChange.add((b"a./x", F, blob.id)),
- ],
- tree1,
- tree2,
- )
-
- def test_tree_changes_prune(self):
- blob_a1 = make_object(Blob, data=b"a1")
- blob_a2 = make_object(Blob, data=b"a2")
- blob_x = make_object(Blob, data=b"x")
- tree1 = self.commit_tree([(b"a", blob_a1), (b"b/x", blob_x)])
- tree2 = self.commit_tree([(b"a", blob_a2), (b"b/x", blob_x)])
- # Remove identical items so lookups will fail unless we prune.
- subtree = self.store[tree1[b"b"][1]]
- for entry in subtree.items():
- del self.store[entry.sha]
- del self.store[subtree.id]
-
- self.assertChangesEqual(
- [TreeChange(CHANGE_MODIFY, (b"a", F, blob_a1.id), (b"a", F, blob_a2.id))],
- tree1,
- tree2,
- )
-
- def test_tree_changes_rename_detector(self):
- blob_a1 = make_object(Blob, data=b"a\nb\nc\nd\n")
- blob_a2 = make_object(Blob, data=b"a\nb\nc\ne\n")
- blob_b = make_object(Blob, data=b"b")
- tree1 = self.commit_tree([(b"a", blob_a1), (b"b", blob_b)])
- tree2 = self.commit_tree([(b"c", blob_a2), (b"b", blob_b)])
- detector = RenameDetector(self.store)
-
- self.assertChangesEqual(
- [
- TreeChange.delete((b"a", F, blob_a1.id)),
- TreeChange.add((b"c", F, blob_a2.id)),
- ],
- tree1,
- tree2,
- )
- self.assertChangesEqual(
- [
- TreeChange.delete((b"a", F, blob_a1.id)),
- TreeChange(
- CHANGE_UNCHANGED,
- (b"b", F, blob_b.id),
- (b"b", F, blob_b.id),
- ),
- TreeChange.add((b"c", F, blob_a2.id)),
- ],
- tree1,
- tree2,
- want_unchanged=True,
- )
- self.assertChangesEqual(
- [TreeChange(CHANGE_RENAME, (b"a", F, blob_a1.id), (b"c", F, blob_a2.id))],
- tree1,
- tree2,
- rename_detector=detector,
- )
- self.assertChangesEqual(
- [
- TreeChange(CHANGE_RENAME, (b"a", F, blob_a1.id), (b"c", F, blob_a2.id)),
- TreeChange(
- CHANGE_UNCHANGED,
- (b"b", F, blob_b.id),
- (b"b", F, blob_b.id),
- ),
- ],
- tree1,
- tree2,
- rename_detector=detector,
- want_unchanged=True,
- )
-
- def assertChangesForMergeEqual(self, expected, parent_trees, merge_tree, **kwargs):
- parent_tree_ids = [t.id for t in parent_trees]
- actual = list(
- tree_changes_for_merge(self.store, parent_tree_ids, merge_tree.id, **kwargs)
- )
- self.assertEqual(expected, actual)
-
- parent_tree_ids.reverse()
- expected = [list(reversed(cs)) for cs in expected]
- actual = list(
- tree_changes_for_merge(self.store, parent_tree_ids, merge_tree.id, **kwargs)
- )
- self.assertEqual(expected, actual)
-
- def test_tree_changes_for_merge_add_no_conflict(self):
- blob = make_object(Blob, data=b"blob")
- parent1 = self.commit_tree([])
- parent2 = merge = self.commit_tree([(b"a", blob)])
- self.assertChangesForMergeEqual([], [parent1, parent2], merge)
- self.assertChangesForMergeEqual([], [parent2, parent2], merge)
-
- def test_tree_changes_for_merge_add_modify_conflict(self):
- blob1 = make_object(Blob, data=b"1")
- blob2 = make_object(Blob, data=b"2")
- parent1 = self.commit_tree([])
- parent2 = self.commit_tree([(b"a", blob1)])
- merge = self.commit_tree([(b"a", blob2)])
- self.assertChangesForMergeEqual(
- [
- [
- TreeChange.add((b"a", F, blob2.id)),
- TreeChange(CHANGE_MODIFY, (b"a", F, blob1.id), (b"a", F, blob2.id)),
- ]
- ],
- [parent1, parent2],
- merge,
- )
-
- def test_tree_changes_for_merge_modify_modify_conflict(self):
- blob1 = make_object(Blob, data=b"1")
- blob2 = make_object(Blob, data=b"2")
- blob3 = make_object(Blob, data=b"3")
- parent1 = self.commit_tree([(b"a", blob1)])
- parent2 = self.commit_tree([(b"a", blob2)])
- merge = self.commit_tree([(b"a", blob3)])
- self.assertChangesForMergeEqual(
- [
- [
- TreeChange(CHANGE_MODIFY, (b"a", F, blob1.id), (b"a", F, blob3.id)),
- TreeChange(CHANGE_MODIFY, (b"a", F, blob2.id), (b"a", F, blob3.id)),
- ]
- ],
- [parent1, parent2],
- merge,
- )
-
- def test_tree_changes_for_merge_modify_no_conflict(self):
- blob1 = make_object(Blob, data=b"1")
- blob2 = make_object(Blob, data=b"2")
- parent1 = self.commit_tree([(b"a", blob1)])
- parent2 = merge = self.commit_tree([(b"a", blob2)])
- self.assertChangesForMergeEqual([], [parent1, parent2], merge)
-
- def test_tree_changes_for_merge_delete_delete_conflict(self):
- blob1 = make_object(Blob, data=b"1")
- blob2 = make_object(Blob, data=b"2")
- parent1 = self.commit_tree([(b"a", blob1)])
- parent2 = self.commit_tree([(b"a", blob2)])
- merge = self.commit_tree([])
- self.assertChangesForMergeEqual(
- [
- [
- TreeChange.delete((b"a", F, blob1.id)),
- TreeChange.delete((b"a", F, blob2.id)),
- ]
- ],
- [parent1, parent2],
- merge,
- )
-
- def test_tree_changes_for_merge_delete_no_conflict(self):
- blob = make_object(Blob, data=b"blob")
- has = self.commit_tree([(b"a", blob)])
- doesnt_have = self.commit_tree([])
- self.assertChangesForMergeEqual([], [has, has], doesnt_have)
- self.assertChangesForMergeEqual([], [has, doesnt_have], doesnt_have)
-
- def test_tree_changes_for_merge_octopus_no_conflict(self):
- r = list(range(5))
- blobs = [make_object(Blob, data=bytes(i)) for i in r]
- parents = [self.commit_tree([(b"a", blobs[i])]) for i in r]
- for i in r:
- # Take the SHA from each of the parents.
- self.assertChangesForMergeEqual([], parents, parents[i])
-
- def test_tree_changes_for_merge_octopus_modify_conflict(self):
- # Because the octopus merge strategy is limited, I doubt it's possible
- # to create this with the git command line. But the output is well-
- # defined, so test it anyway.
- r = list(range(5))
- parent_blobs = [make_object(Blob, data=bytes(i)) for i in r]
- merge_blob = make_object(Blob, data=b"merge")
- parents = [self.commit_tree([(b"a", parent_blobs[i])]) for i in r]
- merge = self.commit_tree([(b"a", merge_blob)])
- expected = [
- [
- TreeChange(
- CHANGE_MODIFY,
- (b"a", F, parent_blobs[i].id),
- (b"a", F, merge_blob.id),
- )
- for i in r
- ]
- ]
- self.assertChangesForMergeEqual(expected, parents, merge)
-
- def test_tree_changes_for_merge_octopus_delete(self):
- blob1 = make_object(Blob, data=b"1")
- blob2 = make_object(Blob, data=b"3")
- parent1 = self.commit_tree([(b"a", blob1)])
- parent2 = self.commit_tree([(b"a", blob2)])
- parent3 = merge = self.commit_tree([])
- self.assertChangesForMergeEqual([], [parent1, parent1, parent1], merge)
- self.assertChangesForMergeEqual([], [parent1, parent1, parent3], merge)
- self.assertChangesForMergeEqual([], [parent1, parent3, parent3], merge)
- self.assertChangesForMergeEqual(
- [
- [
- TreeChange.delete((b"a", F, blob1.id)),
- TreeChange.delete((b"a", F, blob2.id)),
- None,
- ]
- ],
- [parent1, parent2, parent3],
- merge,
- )
-
- def test_tree_changes_for_merge_add_add_same_conflict(self):
- blob = make_object(Blob, data=b"a\nb\nc\nd\n")
- parent1 = self.commit_tree([(b"a", blob)])
- parent2 = self.commit_tree([])
- merge = self.commit_tree([(b"b", blob)])
- add = TreeChange.add((b"b", F, blob.id))
- self.assertChangesForMergeEqual([[add, add]], [parent1, parent2], merge)
-
- def test_tree_changes_for_merge_add_exact_rename_conflict(self):
- blob = make_object(Blob, data=b"a\nb\nc\nd\n")
- parent1 = self.commit_tree([(b"a", blob)])
- parent2 = self.commit_tree([])
- merge = self.commit_tree([(b"b", blob)])
- self.assertChangesForMergeEqual(
- [
- [
- TreeChange(CHANGE_RENAME, (b"a", F, blob.id), (b"b", F, blob.id)),
- TreeChange.add((b"b", F, blob.id)),
- ]
- ],
- [parent1, parent2],
- merge,
- rename_detector=self.detector,
- )
-
- def test_tree_changes_for_merge_add_content_rename_conflict(self):
- blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
- blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
- parent1 = self.commit_tree([(b"a", blob1)])
- parent2 = self.commit_tree([])
- merge = self.commit_tree([(b"b", blob2)])
- self.assertChangesForMergeEqual(
- [
- [
- TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob2.id)),
- TreeChange.add((b"b", F, blob2.id)),
- ]
- ],
- [parent1, parent2],
- merge,
- rename_detector=self.detector,
- )
-
- def test_tree_changes_for_merge_modify_rename_conflict(self):
- blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
- blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
- parent1 = self.commit_tree([(b"a", blob1)])
- parent2 = self.commit_tree([(b"b", blob1)])
- merge = self.commit_tree([(b"b", blob2)])
- self.assertChangesForMergeEqual(
- [
- [
- TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob2.id)),
- TreeChange(CHANGE_MODIFY, (b"b", F, blob1.id), (b"b", F, blob2.id)),
- ]
- ],
- [parent1, parent2],
- merge,
- rename_detector=self.detector,
- )
-
-
-class RenameDetectionTest(DiffTestCase):
- def _do_test_count_blocks(self, count_blocks):
- blob = make_object(Blob, data=b"a\nb\na\n")
- self.assertBlockCountEqual({b"a\n": 4, b"b\n": 2}, count_blocks(blob))
-
- test_count_blocks = functest_builder(_do_test_count_blocks, _count_blocks_py)
- test_count_blocks_extension = ext_functest_builder(
- _do_test_count_blocks, _count_blocks
- )
-
- def _do_test_count_blocks_no_newline(self, count_blocks):
- blob = make_object(Blob, data=b"a\na")
- self.assertBlockCountEqual({b"a\n": 2, b"a": 1}, _count_blocks(blob))
-
- test_count_blocks_no_newline = functest_builder(
- _do_test_count_blocks_no_newline, _count_blocks_py
- )
- test_count_blocks_no_newline_extension = ext_functest_builder(
- _do_test_count_blocks_no_newline, _count_blocks
- )
-
- def assertBlockCountEqual(self, expected, got):
- self.assertEqual(
- {(hash(l) & 0xFFFFFFFF): c for (l, c) in expected.items()},
- {(h & 0xFFFFFFFF): c for (h, c) in got.items()},
- )
-
- def _do_test_count_blocks_chunks(self, count_blocks):
- blob = ShaFile.from_raw_chunks(Blob.type_num, [b"a\nb", b"\na\n"])
- self.assertBlockCountEqual({b"a\n": 4, b"b\n": 2}, _count_blocks(blob))
-
- test_count_blocks_chunks = functest_builder(
- _do_test_count_blocks_chunks, _count_blocks_py
- )
- test_count_blocks_chunks_extension = ext_functest_builder(
- _do_test_count_blocks_chunks, _count_blocks
- )
-
- def _do_test_count_blocks_long_lines(self, count_blocks):
- a = b"a" * 64
- data = a + b"xxx\ny\n" + a + b"zzz\n"
- blob = make_object(Blob, data=data)
- self.assertBlockCountEqual(
- {b"a" * 64: 128, b"xxx\n": 4, b"y\n": 2, b"zzz\n": 4},
- _count_blocks(blob),
- )
-
- test_count_blocks_long_lines = functest_builder(
- _do_test_count_blocks_long_lines, _count_blocks_py
- )
- test_count_blocks_long_lines_extension = ext_functest_builder(
- _do_test_count_blocks_long_lines, _count_blocks
- )
-
- def assertSimilar(self, expected_score, blob1, blob2):
- self.assertEqual(expected_score, _similarity_score(blob1, blob2))
- self.assertEqual(expected_score, _similarity_score(blob2, blob1))
-
- def test_similarity_score(self):
- blob0 = make_object(Blob, data=b"")
- blob1 = make_object(Blob, data=b"ab\ncd\ncd\n")
- blob2 = make_object(Blob, data=b"ab\n")
- blob3 = make_object(Blob, data=b"cd\n")
- blob4 = make_object(Blob, data=b"cd\ncd\n")
-
- self.assertSimilar(100, blob0, blob0)
- self.assertSimilar(0, blob0, blob1)
- self.assertSimilar(33, blob1, blob2)
- self.assertSimilar(33, blob1, blob3)
- self.assertSimilar(66, blob1, blob4)
- self.assertSimilar(0, blob2, blob3)
- self.assertSimilar(50, blob3, blob4)
-
- def test_similarity_score_cache(self):
- blob1 = make_object(Blob, data=b"ab\ncd\n")
- blob2 = make_object(Blob, data=b"ab\n")
-
- block_cache = {}
- self.assertEqual(50, _similarity_score(blob1, blob2, block_cache=block_cache))
- self.assertEqual({blob1.id, blob2.id}, set(block_cache))
-
- def fail_chunks():
- self.fail("Unexpected call to as_raw_chunks()")
-
- blob1.as_raw_chunks = blob2.as_raw_chunks = fail_chunks
- blob1.raw_length = lambda: 6
- blob2.raw_length = lambda: 3
- self.assertEqual(50, _similarity_score(blob1, blob2, block_cache=block_cache))
-
- def test_tree_entry_sort(self):
- sha = "abcd" * 10
- expected_entries = [
- TreeChange.add(TreeEntry(b"aaa", F, sha)),
- TreeChange(
- CHANGE_COPY,
- TreeEntry(b"bbb", F, sha),
- TreeEntry(b"aab", F, sha),
- ),
- TreeChange(
- CHANGE_MODIFY,
- TreeEntry(b"bbb", F, sha),
- TreeEntry(b"bbb", F, b"dabc" * 10),
- ),
- TreeChange(
- CHANGE_RENAME,
- TreeEntry(b"bbc", F, sha),
- TreeEntry(b"ddd", F, sha),
- ),
- TreeChange.delete(TreeEntry(b"ccc", F, sha)),
- ]
-
- for perm in permutations(expected_entries):
- self.assertEqual(expected_entries, sorted(perm, key=_tree_change_key))
-
- def detect_renames(self, tree1, tree2, want_unchanged=False, **kwargs):
- detector = RenameDetector(self.store, **kwargs)
- return detector.changes_with_renames(
- tree1.id, tree2.id, want_unchanged=want_unchanged
- )
-
- def test_no_renames(self):
- blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
- blob2 = make_object(Blob, data=b"a\nb\ne\nf\n")
- blob3 = make_object(Blob, data=b"a\nb\ng\nh\n")
- tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
- tree2 = self.commit_tree([(b"a", blob1), (b"b", blob3)])
- self.assertEqual(
- [TreeChange(CHANGE_MODIFY, (b"b", F, blob2.id), (b"b", F, blob3.id))],
- self.detect_renames(tree1, tree2),
- )
-
- def test_exact_rename_one_to_one(self):
- blob1 = make_object(Blob, data=b"1")
- blob2 = make_object(Blob, data=b"2")
- tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
- tree2 = self.commit_tree([(b"c", blob1), (b"d", blob2)])
- self.assertEqual(
- [
- TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"c", F, blob1.id)),
- TreeChange(CHANGE_RENAME, (b"b", F, blob2.id), (b"d", F, blob2.id)),
- ],
- self.detect_renames(tree1, tree2),
- )
-
- def test_exact_rename_split_different_type(self):
- blob = make_object(Blob, data=b"/foo")
- tree1 = self.commit_tree([(b"a", blob, 0o100644)])
- tree2 = self.commit_tree([(b"a", blob, 0o120000)])
- self.assertEqual(
- [
- TreeChange.add((b"a", 0o120000, blob.id)),
- TreeChange.delete((b"a", 0o100644, blob.id)),
- ],
- self.detect_renames(tree1, tree2),
- )
-
- def test_exact_rename_and_different_type(self):
- blob1 = make_object(Blob, data=b"1")
- blob2 = make_object(Blob, data=b"2")
- tree1 = self.commit_tree([(b"a", blob1)])
- tree2 = self.commit_tree([(b"a", blob2, 0o120000), (b"b", blob1)])
- self.assertEqual(
- [
- TreeChange.add((b"a", 0o120000, blob2.id)),
- TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob1.id)),
- ],
- self.detect_renames(tree1, tree2),
- )
-
- def test_exact_rename_one_to_many(self):
- blob = make_object(Blob, data=b"1")
- tree1 = self.commit_tree([(b"a", blob)])
- tree2 = self.commit_tree([(b"b", blob), (b"c", blob)])
- self.assertEqual(
- [
- TreeChange(CHANGE_RENAME, (b"a", F, blob.id), (b"b", F, blob.id)),
- TreeChange(CHANGE_COPY, (b"a", F, blob.id), (b"c", F, blob.id)),
- ],
- self.detect_renames(tree1, tree2),
- )
-
- def test_exact_rename_many_to_one(self):
- blob = make_object(Blob, data=b"1")
- tree1 = self.commit_tree([(b"a", blob), (b"b", blob)])
- tree2 = self.commit_tree([(b"c", blob)])
- self.assertEqual(
- [
- TreeChange(CHANGE_RENAME, (b"a", F, blob.id), (b"c", F, blob.id)),
- TreeChange.delete((b"b", F, blob.id)),
- ],
- self.detect_renames(tree1, tree2),
- )
-
- def test_exact_rename_many_to_many(self):
- blob = make_object(Blob, data=b"1")
- tree1 = self.commit_tree([(b"a", blob), (b"b", blob)])
- tree2 = self.commit_tree([(b"c", blob), (b"d", blob), (b"e", blob)])
- self.assertEqual(
- [
- TreeChange(CHANGE_RENAME, (b"a", F, blob.id), (b"c", F, blob.id)),
- TreeChange(CHANGE_COPY, (b"a", F, blob.id), (b"e", F, blob.id)),
- TreeChange(CHANGE_RENAME, (b"b", F, blob.id), (b"d", F, blob.id)),
- ],
- self.detect_renames(tree1, tree2),
- )
-
- def test_exact_copy_modify(self):
- blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
- blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
- tree1 = self.commit_tree([(b"a", blob1)])
- tree2 = self.commit_tree([(b"a", blob2), (b"b", blob1)])
- self.assertEqual(
- [
- TreeChange(CHANGE_MODIFY, (b"a", F, blob1.id), (b"a", F, blob2.id)),
- TreeChange(CHANGE_COPY, (b"a", F, blob1.id), (b"b", F, blob1.id)),
- ],
- self.detect_renames(tree1, tree2),
- )
-
- def test_exact_copy_change_mode(self):
- blob = make_object(Blob, data=b"a\nb\nc\nd\n")
- tree1 = self.commit_tree([(b"a", blob)])
- tree2 = self.commit_tree([(b"a", blob, 0o100755), (b"b", blob)])
- self.assertEqual(
- [
- TreeChange(
- CHANGE_MODIFY,
- (b"a", F, blob.id),
- (b"a", 0o100755, blob.id),
- ),
- TreeChange(CHANGE_COPY, (b"a", F, blob.id), (b"b", F, blob.id)),
- ],
- self.detect_renames(tree1, tree2),
- )
-
- def test_rename_threshold(self):
- blob1 = make_object(Blob, data=b"a\nb\nc\n")
- blob2 = make_object(Blob, data=b"a\nb\nd\n")
- tree1 = self.commit_tree([(b"a", blob1)])
- tree2 = self.commit_tree([(b"b", blob2)])
- self.assertEqual(
- [TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob2.id))],
- self.detect_renames(tree1, tree2, rename_threshold=50),
- )
- self.assertEqual(
- [
- TreeChange.delete((b"a", F, blob1.id)),
- TreeChange.add((b"b", F, blob2.id)),
- ],
- self.detect_renames(tree1, tree2, rename_threshold=75),
- )
-
- def test_content_rename_max_files(self):
- blob1 = make_object(Blob, data=b"a\nb\nc\nd")
- blob4 = make_object(Blob, data=b"a\nb\nc\ne\n")
- blob2 = make_object(Blob, data=b"e\nf\ng\nh\n")
- blob3 = make_object(Blob, data=b"e\nf\ng\ni\n")
- tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
- tree2 = self.commit_tree([(b"c", blob3), (b"d", blob4)])
- self.assertEqual(
- [
- TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"d", F, blob4.id)),
- TreeChange(CHANGE_RENAME, (b"b", F, blob2.id), (b"c", F, blob3.id)),
- ],
- self.detect_renames(tree1, tree2),
- )
- self.assertEqual(
- [
- TreeChange.delete((b"a", F, blob1.id)),
- TreeChange.delete((b"b", F, blob2.id)),
- TreeChange.add((b"c", F, blob3.id)),
- TreeChange.add((b"d", F, blob4.id)),
- ],
- self.detect_renames(tree1, tree2, max_files=1),
- )
-
- def test_content_rename_one_to_one(self):
- b11 = make_object(Blob, data=b"a\nb\nc\nd\n")
- b12 = make_object(Blob, data=b"a\nb\nc\ne\n")
- b21 = make_object(Blob, data=b"e\nf\ng\n\nh")
- b22 = make_object(Blob, data=b"e\nf\ng\n\ni")
- tree1 = self.commit_tree([(b"a", b11), (b"b", b21)])
- tree2 = self.commit_tree([(b"c", b12), (b"d", b22)])
- self.assertEqual(
- [
- TreeChange(CHANGE_RENAME, (b"a", F, b11.id), (b"c", F, b12.id)),
- TreeChange(CHANGE_RENAME, (b"b", F, b21.id), (b"d", F, b22.id)),
- ],
- self.detect_renames(tree1, tree2),
- )
-
- def test_content_rename_one_to_one_ordering(self):
- blob1 = make_object(Blob, data=b"a\nb\nc\nd\ne\nf\n")
- blob2 = make_object(Blob, data=b"a\nb\nc\nd\ng\nh\n")
- # 6/10 match to blob1, 8/10 match to blob2
- blob3 = make_object(Blob, data=b"a\nb\nc\nd\ng\ni\n")
- tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
- tree2 = self.commit_tree([(b"c", blob3)])
- self.assertEqual(
- [
- TreeChange.delete((b"a", F, blob1.id)),
- TreeChange(CHANGE_RENAME, (b"b", F, blob2.id), (b"c", F, blob3.id)),
- ],
- self.detect_renames(tree1, tree2),
- )
-
- tree3 = self.commit_tree([(b"a", blob2), (b"b", blob1)])
- tree4 = self.commit_tree([(b"c", blob3)])
- self.assertEqual(
- [
- TreeChange(CHANGE_RENAME, (b"a", F, blob2.id), (b"c", F, blob3.id)),
- TreeChange.delete((b"b", F, blob1.id)),
- ],
- self.detect_renames(tree3, tree4),
- )
-
- def test_content_rename_one_to_many(self):
- blob1 = make_object(Blob, data=b"aa\nb\nc\nd\ne\n")
- blob2 = make_object(Blob, data=b"ab\nb\nc\nd\ne\n") # 8/11 match
- blob3 = make_object(Blob, data=b"aa\nb\nc\nd\nf\n") # 9/11 match
- tree1 = self.commit_tree([(b"a", blob1)])
- tree2 = self.commit_tree([(b"b", blob2), (b"c", blob3)])
- self.assertEqual(
- [
- TreeChange(CHANGE_COPY, (b"a", F, blob1.id), (b"b", F, blob2.id)),
- TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"c", F, blob3.id)),
- ],
- self.detect_renames(tree1, tree2),
- )
-
- def test_content_rename_many_to_one(self):
- blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
- blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
- blob3 = make_object(Blob, data=b"a\nb\nc\nf\n")
- tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
- tree2 = self.commit_tree([(b"c", blob3)])
- self.assertEqual(
- [
- TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"c", F, blob3.id)),
- TreeChange.delete((b"b", F, blob2.id)),
- ],
- self.detect_renames(tree1, tree2),
- )
-
- def test_content_rename_many_to_many(self):
- blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
- blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
- blob3 = make_object(Blob, data=b"a\nb\nc\nf\n")
- blob4 = make_object(Blob, data=b"a\nb\nc\ng\n")
- tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
- tree2 = self.commit_tree([(b"c", blob3), (b"d", blob4)])
- # TODO(dborowitz): Distribute renames rather than greedily choosing
- # copies.
- self.assertEqual(
- [
- TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"c", F, blob3.id)),
- TreeChange(CHANGE_COPY, (b"a", F, blob1.id), (b"d", F, blob4.id)),
- TreeChange.delete((b"b", F, blob2.id)),
- ],
- self.detect_renames(tree1, tree2),
- )
-
- def test_content_rename_with_more_deletions(self):
- blob1 = make_object(Blob, data=b"")
- tree1 = self.commit_tree(
- [(b"a", blob1), (b"b", blob1), (b"c", blob1), (b"d", blob1)]
- )
- tree2 = self.commit_tree([(b"e", blob1), (b"f", blob1), (b"g", blob1)])
- self.maxDiff = None
- self.assertEqual(
- [
- TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"e", F, blob1.id)),
- TreeChange(CHANGE_RENAME, (b"b", F, blob1.id), (b"f", F, blob1.id)),
- TreeChange(CHANGE_RENAME, (b"c", F, blob1.id), (b"g", F, blob1.id)),
- TreeChange.delete((b"d", F, blob1.id)),
- ],
- self.detect_renames(tree1, tree2),
- )
-
- def test_content_rename_gitlink(self):
- blob1 = make_object(Blob, data=b"blob1")
- blob2 = make_object(Blob, data=b"blob2")
- link1 = b"1" * 40
- link2 = b"2" * 40
- tree1 = self.commit_tree([(b"a", blob1), (b"b", link1, 0o160000)])
- tree2 = self.commit_tree([(b"c", blob2), (b"d", link2, 0o160000)])
- self.assertEqual(
- [
- TreeChange.delete((b"a", 0o100644, blob1.id)),
- TreeChange.delete((b"b", 0o160000, link1)),
- TreeChange.add((b"c", 0o100644, blob2.id)),
- TreeChange.add((b"d", 0o160000, link2)),
- ],
- self.detect_renames(tree1, tree2),
- )
-
- def test_exact_rename_swap(self):
- blob1 = make_object(Blob, data=b"1")
- blob2 = make_object(Blob, data=b"2")
- tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
- tree2 = self.commit_tree([(b"a", blob2), (b"b", blob1)])
- self.assertEqual(
- [
- TreeChange(CHANGE_MODIFY, (b"a", F, blob1.id), (b"a", F, blob2.id)),
- TreeChange(CHANGE_MODIFY, (b"b", F, blob2.id), (b"b", F, blob1.id)),
- ],
- self.detect_renames(tree1, tree2),
- )
- self.assertEqual(
- [
- TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob1.id)),
- TreeChange(CHANGE_RENAME, (b"b", F, blob2.id), (b"a", F, blob2.id)),
- ],
- self.detect_renames(tree1, tree2, rewrite_threshold=50),
- )
-
- def test_content_rename_swap(self):
- blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
- blob2 = make_object(Blob, data=b"e\nf\ng\nh\n")
- blob3 = make_object(Blob, data=b"a\nb\nc\ne\n")
- blob4 = make_object(Blob, data=b"e\nf\ng\ni\n")
- tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
- tree2 = self.commit_tree([(b"a", blob4), (b"b", blob3)])
- self.assertEqual(
- [
- TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob3.id)),
- TreeChange(CHANGE_RENAME, (b"b", F, blob2.id), (b"a", F, blob4.id)),
- ],
- self.detect_renames(tree1, tree2, rewrite_threshold=60),
- )
-
- def test_rewrite_threshold(self):
- blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
- blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
- blob3 = make_object(Blob, data=b"a\nb\nf\ng\n")
-
- tree1 = self.commit_tree([(b"a", blob1)])
- tree2 = self.commit_tree([(b"a", blob3), (b"b", blob2)])
-
- no_renames = [
- TreeChange(CHANGE_MODIFY, (b"a", F, blob1.id), (b"a", F, blob3.id)),
- TreeChange(CHANGE_COPY, (b"a", F, blob1.id), (b"b", F, blob2.id)),
- ]
- self.assertEqual(no_renames, self.detect_renames(tree1, tree2))
- self.assertEqual(
- no_renames, self.detect_renames(tree1, tree2, rewrite_threshold=40)
- )
- self.assertEqual(
- [
- TreeChange.add((b"a", F, blob3.id)),
- TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob2.id)),
- ],
- self.detect_renames(tree1, tree2, rewrite_threshold=80),
- )
-
- def test_find_copies_harder_exact(self):
- blob = make_object(Blob, data=b"blob")
- tree1 = self.commit_tree([(b"a", blob)])
- tree2 = self.commit_tree([(b"a", blob), (b"b", blob)])
- self.assertEqual(
- [TreeChange.add((b"b", F, blob.id))],
- self.detect_renames(tree1, tree2),
- )
- self.assertEqual(
- [TreeChange(CHANGE_COPY, (b"a", F, blob.id), (b"b", F, blob.id))],
- self.detect_renames(tree1, tree2, find_copies_harder=True),
- )
-
- def test_find_copies_harder_content(self):
- blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
- blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
- tree1 = self.commit_tree([(b"a", blob1)])
- tree2 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
- self.assertEqual(
- [TreeChange.add((b"b", F, blob2.id))],
- self.detect_renames(tree1, tree2),
- )
- self.assertEqual(
- [TreeChange(CHANGE_COPY, (b"a", F, blob1.id), (b"b", F, blob2.id))],
- self.detect_renames(tree1, tree2, find_copies_harder=True),
- )
-
- def test_find_copies_harder_with_rewrites(self):
- blob_a1 = make_object(Blob, data=b"a\nb\nc\nd\n")
- blob_a2 = make_object(Blob, data=b"f\ng\nh\ni\n")
- blob_b2 = make_object(Blob, data=b"a\nb\nc\ne\n")
- tree1 = self.commit_tree([(b"a", blob_a1)])
- tree2 = self.commit_tree([(b"a", blob_a2), (b"b", blob_b2)])
- self.assertEqual(
- [
- TreeChange(CHANGE_MODIFY, (b"a", F, blob_a1.id), (b"a", F, blob_a2.id)),
- TreeChange(CHANGE_COPY, (b"a", F, blob_a1.id), (b"b", F, blob_b2.id)),
- ],
- self.detect_renames(tree1, tree2, find_copies_harder=True),
- )
- self.assertEqual(
- [
- TreeChange.add((b"a", F, blob_a2.id)),
- TreeChange(CHANGE_RENAME, (b"a", F, blob_a1.id), (b"b", F, blob_b2.id)),
- ],
- self.detect_renames(
- tree1, tree2, rewrite_threshold=50, find_copies_harder=True
- ),
- )
-
- def test_reuse_detector(self):
- blob = make_object(Blob, data=b"blob")
- tree1 = self.commit_tree([(b"a", blob)])
- tree2 = self.commit_tree([(b"b", blob)])
- detector = RenameDetector(self.store)
- changes = [TreeChange(CHANGE_RENAME, (b"a", F, blob.id), (b"b", F, blob.id))]
- self.assertEqual(changes, detector.changes_with_renames(tree1.id, tree2.id))
- self.assertEqual(changes, detector.changes_with_renames(tree1.id, tree2.id))
-
- def test_want_unchanged(self):
- blob_a1 = make_object(Blob, data=b"a\nb\nc\nd\n")
- blob_b = make_object(Blob, data=b"b")
- blob_c2 = make_object(Blob, data=b"a\nb\nc\ne\n")
- tree1 = self.commit_tree([(b"a", blob_a1), (b"b", blob_b)])
- tree2 = self.commit_tree([(b"c", blob_c2), (b"b", blob_b)])
- self.assertEqual(
- [TreeChange(CHANGE_RENAME, (b"a", F, blob_a1.id), (b"c", F, blob_c2.id))],
- self.detect_renames(tree1, tree2),
- )
- self.assertEqual(
- [
- TreeChange(CHANGE_RENAME, (b"a", F, blob_a1.id), (b"c", F, blob_c2.id)),
- TreeChange(
- CHANGE_UNCHANGED,
- (b"b", F, blob_b.id),
- (b"b", F, blob_b.id),
- ),
- ],
- self.detect_renames(tree1, tree2, want_unchanged=True),
- )
blob - 9827234051e77a3729d99b90abb181e5d76b46d7 (mode 644)
blob + /dev/null
--- dulwich/tests/test_fastexport.py
+++ /dev/null
-# test_fastexport.py -- Fast export/import functionality
-# Copyright (C) 2010 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-import stat
-from io import BytesIO
-
-from dulwich.tests import SkipTest, TestCase
-
-from ..object_store import MemoryObjectStore
-from ..objects import ZERO_SHA, Blob, Commit, Tree
-from ..repo import MemoryRepo
-from .utils import build_commit_graph
-
-
-class GitFastExporterTests(TestCase):
- """Tests for the GitFastExporter tests."""
-
- def setUp(self):
- super().setUp()
- self.store = MemoryObjectStore()
- self.stream = BytesIO()
- try:
- from ..fastexport import GitFastExporter
- except ImportError as exc:
- raise SkipTest("python-fastimport not available") from exc
- self.fastexporter = GitFastExporter(self.stream, self.store)
-
- def test_emit_blob(self):
- b = Blob()
- b.data = b"fooBAR"
- self.fastexporter.emit_blob(b)
- self.assertEqual(b"blob\nmark :1\ndata 6\nfooBAR\n", self.stream.getvalue())
-
- def test_emit_commit(self):
- b = Blob()
- b.data = b"FOO"
- t = Tree()
- t.add(b"foo", stat.S_IFREG | 0o644, b.id)
- c = Commit()
- c.committer = c.author = b"Jelmer <jelmer@host>"
- c.author_time = c.commit_time = 1271345553
- c.author_timezone = c.commit_timezone = 0
- c.message = b"msg"
- c.tree = t.id
- self.store.add_objects([(b, None), (t, None), (c, None)])
- self.fastexporter.emit_commit(c, b"refs/heads/master")
- self.assertEqual(
- b"""blob
-mark :1
-data 3
-FOO
-commit refs/heads/master
-mark :2
-author Jelmer <jelmer@host> 1271345553 +0000
-committer Jelmer <jelmer@host> 1271345553 +0000
-data 3
-msg
-M 644 :1 foo
-""",
- self.stream.getvalue(),
- )
-
-
-class GitImportProcessorTests(TestCase):
- """Tests for the GitImportProcessor tests."""
-
- def setUp(self):
- super().setUp()
- self.repo = MemoryRepo()
- try:
- from ..fastexport import GitImportProcessor
- except ImportError as exc:
- raise SkipTest("python-fastimport not available") from exc
- self.processor = GitImportProcessor(self.repo)
-
- def test_reset_handler(self):
- from fastimport import commands
-
- [c1] = build_commit_graph(self.repo.object_store, [[1]])
- cmd = commands.ResetCommand(b"refs/heads/foo", c1.id)
- self.processor.reset_handler(cmd)
- self.assertEqual(c1.id, self.repo.get_refs()[b"refs/heads/foo"])
- self.assertEqual(c1.id, self.processor.last_commit)
-
- def test_reset_handler_marker(self):
- from fastimport import commands
-
- [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]])
- self.processor.markers[b"10"] = c1.id
- cmd = commands.ResetCommand(b"refs/heads/foo", b":10")
- self.processor.reset_handler(cmd)
- self.assertEqual(c1.id, self.repo.get_refs()[b"refs/heads/foo"])
-
- def test_reset_handler_default(self):
- from fastimport import commands
-
- [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]])
- cmd = commands.ResetCommand(b"refs/heads/foo", None)
- self.processor.reset_handler(cmd)
- self.assertEqual(ZERO_SHA, self.repo.get_refs()[b"refs/heads/foo"])
-
- def test_commit_handler(self):
- from fastimport import commands
-
- cmd = commands.CommitCommand(
- b"refs/heads/foo",
- b"mrkr",
- (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
- (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
- b"FOO",
- None,
- [],
- [],
- )
- self.processor.commit_handler(cmd)
- commit = self.repo[self.processor.last_commit]
- self.assertEqual(b"Jelmer <jelmer@samba.org>", commit.author)
- self.assertEqual(b"Jelmer <jelmer@samba.org>", commit.committer)
- self.assertEqual(b"FOO", commit.message)
- self.assertEqual([], commit.parents)
- self.assertEqual(432432432.0, commit.commit_time)
- self.assertEqual(432432432.0, commit.author_time)
- self.assertEqual(3600, commit.commit_timezone)
- self.assertEqual(3600, commit.author_timezone)
- self.assertEqual(commit, self.repo[b"refs/heads/foo"])
-
- def test_commit_handler_markers(self):
- from fastimport import commands
-
- [c1, c2, c3] = build_commit_graph(self.repo.object_store, [[1], [2], [3]])
- self.processor.markers[b"10"] = c1.id
- self.processor.markers[b"42"] = c2.id
- self.processor.markers[b"98"] = c3.id
- cmd = commands.CommitCommand(
- b"refs/heads/foo",
- b"mrkr",
- (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
- (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
- b"FOO",
- b":10",
- [b":42", b":98"],
- [],
- )
- self.processor.commit_handler(cmd)
- commit = self.repo[self.processor.last_commit]
- self.assertEqual(c1.id, commit.parents[0])
- self.assertEqual(c2.id, commit.parents[1])
- self.assertEqual(c3.id, commit.parents[2])
-
- def test_import_stream(self):
- markers = self.processor.import_stream(
- BytesIO(
- b"""blob
-mark :1
-data 11
-text for a
-
-commit refs/heads/master
-mark :2
-committer Joe Foo <joe@foo.com> 1288287382 +0000
-data 20
-<The commit message>
-M 100644 :1 a
-
-"""
- )
- )
- self.assertEqual(2, len(markers))
- self.assertIsInstance(self.repo[markers[b"1"]], Blob)
- self.assertIsInstance(self.repo[markers[b"2"]], Commit)
-
- def test_file_add(self):
- from fastimport import commands
-
- cmd = commands.BlobCommand(b"23", b"data")
- self.processor.blob_handler(cmd)
- cmd = commands.CommitCommand(
- b"refs/heads/foo",
- b"mrkr",
- (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
- (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
- b"FOO",
- None,
- [],
- [commands.FileModifyCommand(b"path", 0o100644, b":23", None)],
- )
- self.processor.commit_handler(cmd)
- commit = self.repo[self.processor.last_commit]
- self.assertEqual(
- [(b"path", 0o100644, b"6320cd248dd8aeaab759d5871f8781b5c0505172")],
- self.repo[commit.tree].items(),
- )
-
- def simple_commit(self):
- from fastimport import commands
-
- cmd = commands.BlobCommand(b"23", b"data")
- self.processor.blob_handler(cmd)
- cmd = commands.CommitCommand(
- b"refs/heads/foo",
- b"mrkr",
- (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
- (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
- b"FOO",
- None,
- [],
- [commands.FileModifyCommand(b"path", 0o100644, b":23", None)],
- )
- self.processor.commit_handler(cmd)
- commit = self.repo[self.processor.last_commit]
- return commit
-
- def make_file_commit(self, file_cmds):
- """Create a trivial commit with the specified file commands.
-
- Args:
- file_cmds: File commands to run.
- Returns: The created commit object
- """
- from fastimport import commands
-
- cmd = commands.CommitCommand(
- b"refs/heads/foo",
- b"mrkr",
- (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
- (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
- b"FOO",
- None,
- [],
- file_cmds,
- )
- self.processor.commit_handler(cmd)
- return self.repo[self.processor.last_commit]
-
- def test_file_copy(self):
- from fastimport import commands
-
- self.simple_commit()
- commit = self.make_file_commit([commands.FileCopyCommand(b"path", b"new_path")])
- self.assertEqual(
- [
- (
- b"new_path",
- 0o100644,
- b"6320cd248dd8aeaab759d5871f8781b5c0505172",
- ),
- (
- b"path",
- 0o100644,
- b"6320cd248dd8aeaab759d5871f8781b5c0505172",
- ),
- ],
- self.repo[commit.tree].items(),
- )
-
- def test_file_move(self):
- from fastimport import commands
-
- self.simple_commit()
- commit = self.make_file_commit(
- [commands.FileRenameCommand(b"path", b"new_path")]
- )
- self.assertEqual(
- [
- (
- b"new_path",
- 0o100644,
- b"6320cd248dd8aeaab759d5871f8781b5c0505172",
- ),
- ],
- self.repo[commit.tree].items(),
- )
-
- def test_file_delete(self):
- from fastimport import commands
-
- self.simple_commit()
- commit = self.make_file_commit([commands.FileDeleteCommand(b"path")])
- self.assertEqual([], self.repo[commit.tree].items())
-
- def test_file_deleteall(self):
- from fastimport import commands
-
- self.simple_commit()
- commit = self.make_file_commit([commands.FileDeleteAllCommand()])
- self.assertEqual([], self.repo[commit.tree].items())
blob - 6c1c19d4f8a878ec5d1a75bed008e22e70e6e035 (mode 644)
blob + /dev/null
--- dulwich/tests/test_file.py
+++ /dev/null
-# test_file.py -- Test for git files
-# Copyright (C) 2010 Google, Inc.
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-import io
-import os
-import shutil
-import sys
-import tempfile
-
-from dulwich.tests import SkipTest, TestCase
-
-from ..file import FileLocked, GitFile, _fancy_rename
-
-
-class FancyRenameTests(TestCase):
- def setUp(self):
- super().setUp()
- self._tempdir = tempfile.mkdtemp()
- self.foo = self.path("foo")
- self.bar = self.path("bar")
- self.create(self.foo, b"foo contents")
-
- def tearDown(self):
- shutil.rmtree(self._tempdir)
- super().tearDown()
-
- def path(self, filename):
- return os.path.join(self._tempdir, filename)
-
- def create(self, path, contents):
- f = open(path, "wb")
- f.write(contents)
- f.close()
-
- def test_no_dest_exists(self):
- self.assertFalse(os.path.exists(self.bar))
- _fancy_rename(self.foo, self.bar)
- self.assertFalse(os.path.exists(self.foo))
-
- new_f = open(self.bar, "rb")
- self.assertEqual(b"foo contents", new_f.read())
- new_f.close()
-
- def test_dest_exists(self):
- self.create(self.bar, b"bar contents")
- _fancy_rename(self.foo, self.bar)
- self.assertFalse(os.path.exists(self.foo))
-
- new_f = open(self.bar, "rb")
- self.assertEqual(b"foo contents", new_f.read())
- new_f.close()
-
- def test_dest_opened(self):
- if sys.platform != "win32":
- raise SkipTest("platform allows overwriting open files")
- self.create(self.bar, b"bar contents")
- dest_f = open(self.bar, "rb")
- self.assertRaises(OSError, _fancy_rename, self.foo, self.bar)
- dest_f.close()
- self.assertTrue(os.path.exists(self.path("foo")))
-
- new_f = open(self.foo, "rb")
- self.assertEqual(b"foo contents", new_f.read())
- new_f.close()
-
- new_f = open(self.bar, "rb")
- self.assertEqual(b"bar contents", new_f.read())
- new_f.close()
-
-
-class GitFileTests(TestCase):
- def setUp(self):
- super().setUp()
- self._tempdir = tempfile.mkdtemp()
- f = open(self.path("foo"), "wb")
- f.write(b"foo contents")
- f.close()
-
- def tearDown(self):
- shutil.rmtree(self._tempdir)
- super().tearDown()
-
- def path(self, filename):
- return os.path.join(self._tempdir, filename)
-
- def test_invalid(self):
- foo = self.path("foo")
- self.assertRaises(IOError, GitFile, foo, mode="r")
- self.assertRaises(IOError, GitFile, foo, mode="ab")
- self.assertRaises(IOError, GitFile, foo, mode="r+b")
- self.assertRaises(IOError, GitFile, foo, mode="w+b")
- self.assertRaises(IOError, GitFile, foo, mode="a+bU")
-
- def test_readonly(self):
- f = GitFile(self.path("foo"), "rb")
- self.assertIsInstance(f, io.IOBase)
- self.assertEqual(b"foo contents", f.read())
- self.assertEqual(b"", f.read())
- f.seek(4)
- self.assertEqual(b"contents", f.read())
- f.close()
-
- def test_default_mode(self):
- f = GitFile(self.path("foo"))
- self.assertEqual(b"foo contents", f.read())
- f.close()
-
- def test_write(self):
- foo = self.path("foo")
- foo_lock = "%s.lock" % foo
-
- orig_f = open(foo, "rb")
- self.assertEqual(orig_f.read(), b"foo contents")
- orig_f.close()
-
- self.assertFalse(os.path.exists(foo_lock))
- f = GitFile(foo, "wb")
- self.assertFalse(f.closed)
- self.assertRaises(AttributeError, getattr, f, "not_a_file_property")
-
- self.assertTrue(os.path.exists(foo_lock))
- f.write(b"new stuff")
- f.seek(4)
- f.write(b"contents")
- f.close()
- self.assertFalse(os.path.exists(foo_lock))
-
- new_f = open(foo, "rb")
- self.assertEqual(b"new contents", new_f.read())
- new_f.close()
-
- def test_open_twice(self):
- foo = self.path("foo")
- f1 = GitFile(foo, "wb")
- f1.write(b"new")
- try:
- f2 = GitFile(foo, "wb")
- self.fail()
- except FileLocked:
- pass
- else:
- f2.close()
- f1.write(b" contents")
- f1.close()
-
- # Ensure trying to open twice doesn't affect original.
- f = open(foo, "rb")
- self.assertEqual(b"new contents", f.read())
- f.close()
-
- def test_abort(self):
- foo = self.path("foo")
- foo_lock = "%s.lock" % foo
-
- orig_f = open(foo, "rb")
- self.assertEqual(orig_f.read(), b"foo contents")
- orig_f.close()
-
- f = GitFile(foo, "wb")
- f.write(b"new contents")
- f.abort()
- self.assertTrue(f.closed)
- self.assertFalse(os.path.exists(foo_lock))
-
- new_orig_f = open(foo, "rb")
- self.assertEqual(new_orig_f.read(), b"foo contents")
- new_orig_f.close()
-
- def test_abort_close(self):
- foo = self.path("foo")
- f = GitFile(foo, "wb")
- f.abort()
- try:
- f.close()
- except OSError:
- self.fail()
-
- f = GitFile(foo, "wb")
- f.close()
- try:
- f.abort()
- except OSError:
- self.fail()
-
- def test_abort_close_removed(self):
- foo = self.path("foo")
- f = GitFile(foo, "wb")
-
- f._file.close()
- os.remove(foo + ".lock")
-
- f.abort()
- self.assertTrue(f._closed)
blob - 661ad3101d7c657e84efd6699aacb64f6ec04cd6 (mode 644)
blob + /dev/null
--- dulwich/tests/test_grafts.py
+++ /dev/null
-# test_grafts.py -- Tests for graftpoints
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for graftpoints."""
-
-import os
-import shutil
-import tempfile
-
-from dulwich.tests import TestCase
-
-from ..errors import ObjectFormatException
-from ..objects import Tree
-from ..repo import MemoryRepo, Repo, parse_graftpoints, serialize_graftpoints
-
-
-def makesha(digit):
- return (str(digit).encode("ascii") * 40)[:40]
-
-
-class GraftParserTests(TestCase):
- def assertParse(self, expected, graftpoints):
- self.assertEqual(expected, parse_graftpoints(iter(graftpoints)))
-
- def test_no_grafts(self):
- self.assertParse({}, [])
-
- def test_no_parents(self):
- self.assertParse({makesha(0): []}, [makesha(0)])
-
- def test_parents(self):
- self.assertParse(
- {makesha(0): [makesha(1), makesha(2)]},
- [b" ".join([makesha(0), makesha(1), makesha(2)])],
- )
-
- def test_multiple_hybrid(self):
- self.assertParse(
- {
- makesha(0): [],
- makesha(1): [makesha(2)],
- makesha(3): [makesha(4), makesha(5)],
- },
- [
- makesha(0),
- b" ".join([makesha(1), makesha(2)]),
- b" ".join([makesha(3), makesha(4), makesha(5)]),
- ],
- )
-
-
-class GraftSerializerTests(TestCase):
- def assertSerialize(self, expected, graftpoints):
- self.assertEqual(sorted(expected), sorted(serialize_graftpoints(graftpoints)))
-
- def test_no_grafts(self):
- self.assertSerialize(b"", {})
-
- def test_no_parents(self):
- self.assertSerialize(makesha(0), {makesha(0): []})
-
- def test_parents(self):
- self.assertSerialize(
- b" ".join([makesha(0), makesha(1), makesha(2)]),
- {makesha(0): [makesha(1), makesha(2)]},
- )
-
- def test_multiple_hybrid(self):
- self.assertSerialize(
- b"\n".join(
- [
- makesha(0),
- b" ".join([makesha(1), makesha(2)]),
- b" ".join([makesha(3), makesha(4), makesha(5)]),
- ]
- ),
- {
- makesha(0): [],
- makesha(1): [makesha(2)],
- makesha(3): [makesha(4), makesha(5)],
- },
- )
-
-
-class GraftsInRepositoryBase:
- def tearDown(self):
- super().tearDown()
-
- def get_repo_with_grafts(self, grafts):
- r = self._repo
- r._add_graftpoints(grafts)
- return r
-
- def test_no_grafts(self):
- r = self.get_repo_with_grafts({})
-
- shas = [e.commit.id for e in r.get_walker()]
- self.assertEqual(shas, self._shas[::-1])
-
- def test_no_parents_graft(self):
- r = self.get_repo_with_grafts({self._repo.head(): []})
-
- self.assertEqual([e.commit.id for e in r.get_walker()], [r.head()])
-
- def test_existing_parent_graft(self):
- r = self.get_repo_with_grafts({self._shas[-1]: [self._shas[0]]})
-
- self.assertEqual(
- [e.commit.id for e in r.get_walker()],
- [self._shas[-1], self._shas[0]],
- )
-
- def test_remove_graft(self):
- r = self.get_repo_with_grafts({self._repo.head(): []})
- r._remove_graftpoints([self._repo.head()])
-
- self.assertEqual([e.commit.id for e in r.get_walker()], self._shas[::-1])
-
- def test_object_store_fail_invalid_parents(self):
- r = self._repo
-
- self.assertRaises(
- ObjectFormatException, r._add_graftpoints, {self._shas[-1]: ["1"]}
- )
-
-
-class GraftsInRepoTests(GraftsInRepositoryBase, TestCase):
- def setUp(self):
- super().setUp()
- self._repo_dir = os.path.join(tempfile.mkdtemp())
- r = self._repo = Repo.init(self._repo_dir)
- self.addCleanup(shutil.rmtree, self._repo_dir)
-
- self._shas = []
-
- commit_kwargs = {
- "committer": b"Test Committer <test@nodomain.com>",
- "author": b"Test Author <test@nodomain.com>",
- "commit_timestamp": 12395,
- "commit_timezone": 0,
- "author_timestamp": 12395,
- "author_timezone": 0,
- }
-
- self._shas.append(r.do_commit(b"empty commit", **commit_kwargs))
- self._shas.append(r.do_commit(b"empty commit", **commit_kwargs))
- self._shas.append(r.do_commit(b"empty commit", **commit_kwargs))
-
- def test_init_with_empty_info_grafts(self):
- r = self._repo
- r._put_named_file(os.path.join("info", "grafts"), b"")
-
- r = Repo(self._repo_dir)
- self.assertEqual({}, r._graftpoints)
-
- def test_init_with_info_grafts(self):
- r = self._repo
- r._put_named_file(
- os.path.join("info", "grafts"),
- self._shas[-1] + b" " + self._shas[0],
- )
-
- r = Repo(self._repo_dir)
- self.assertEqual({self._shas[-1]: [self._shas[0]]}, r._graftpoints)
-
-
-class GraftsInMemoryRepoTests(GraftsInRepositoryBase, TestCase):
- def setUp(self):
- super().setUp()
- r = self._repo = MemoryRepo()
-
- self._shas = []
-
- tree = Tree()
-
- commit_kwargs = {
- "committer": b"Test Committer <test@nodomain.com>",
- "author": b"Test Author <test@nodomain.com>",
- "commit_timestamp": 12395,
- "commit_timezone": 0,
- "author_timestamp": 12395,
- "author_timezone": 0,
- "tree": tree.id,
- }
-
- self._shas.append(r.do_commit(b"empty commit", **commit_kwargs))
- self._shas.append(r.do_commit(b"empty commit", **commit_kwargs))
- self._shas.append(r.do_commit(b"empty commit", **commit_kwargs))
blob - 08e00c6a732e2093cc6ca2c64906f360db243963 (mode 644)
blob + /dev/null
--- dulwich/tests/test_graph.py
+++ /dev/null
-# test_graph.py -- Tests for merge base
-# Copyright (c) 2020 Kevin B. Hendricks, Stratford Ontario Canada
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-
-"""Tests for dulwich.graph."""
-
-from dulwich.tests import TestCase
-
-from ..graph import WorkList, _find_lcas, can_fast_forward
-from ..repo import MemoryRepo
-from .utils import make_commit
-
-
-class FindMergeBaseTests(TestCase):
- @staticmethod
- def run_test(dag, inputs):
- def lookup_parents(commit_id):
- return dag[commit_id]
-
- def lookup_stamp(commit_id):
- # any constant timestamp value here will work to force
- # this test to test the same behaviour as done previously
- return 100
-
- c1 = inputs[0]
- c2s = inputs[1:]
- return set(_find_lcas(lookup_parents, c1, c2s, lookup_stamp))
-
- def test_multiple_lca(self):
- # two lowest common ancestors
- graph = {
- "5": ["1", "2"],
- "4": ["3", "1"],
- "3": ["2"],
- "2": ["0"],
- "1": [],
- "0": [],
- }
- self.assertEqual(self.run_test(graph, ["4", "5"]), {"1", "2"})
-
- def test_no_common_ancestor(self):
- # no common ancestor
- graph = {
- "4": ["2"],
- "3": ["1"],
- "2": [],
- "1": ["0"],
- "0": [],
- }
- self.assertEqual(self.run_test(graph, ["4", "3"]), set())
-
- def test_ancestor(self):
- # ancestor
- graph = {
- "G": ["D", "F"],
- "F": ["E"],
- "D": ["C"],
- "C": ["B"],
- "E": ["B"],
- "B": ["A"],
- "A": [],
- }
- self.assertEqual(self.run_test(graph, ["D", "C"]), {"C"})
-
- def test_direct_parent(self):
- # parent
- graph = {
- "G": ["D", "F"],
- "F": ["E"],
- "D": ["C"],
- "C": ["B"],
- "E": ["B"],
- "B": ["A"],
- "A": [],
- }
- self.assertEqual(self.run_test(graph, ["G", "D"]), {"D"})
-
- def test_another_crossover(self):
- # Another cross over
- graph = {
- "G": ["D", "F"],
- "F": ["E", "C"],
- "D": ["C", "E"],
- "C": ["B"],
- "E": ["B"],
- "B": ["A"],
- "A": [],
- }
- self.assertEqual(self.run_test(graph, ["D", "F"]), {"E", "C"})
-
- def test_three_way_merge_lca(self):
- # three way merge commit straight from git docs
- graph = {
- "C": ["C1"],
- "C1": ["C2"],
- "C2": ["C3"],
- "C3": ["C4"],
- "C4": ["2"],
- "B": ["B1"],
- "B1": ["B2"],
- "B2": ["B3"],
- "B3": ["1"],
- "A": ["A1"],
- "A1": ["A2"],
- "A2": ["A3"],
- "A3": ["1"],
- "1": ["2"],
- "2": [],
- }
- # assumes a theoretical merge M exists that merges B and C first
- # which actually means find the first LCA from either of B OR C with A
- self.assertEqual(self.run_test(graph, ["A", "B", "C"]), {"1"})
-
- def test_octopus(self):
- # octopus algorithm test
- # test straight from git docs of A, B, and C
- # but this time use octopus to find lcas of A, B, and C simultaneously
- graph = {
- "C": ["C1"],
- "C1": ["C2"],
- "C2": ["C3"],
- "C3": ["C4"],
- "C4": ["2"],
- "B": ["B1"],
- "B1": ["B2"],
- "B2": ["B3"],
- "B3": ["1"],
- "A": ["A1"],
- "A1": ["A2"],
- "A2": ["A3"],
- "A3": ["1"],
- "1": ["2"],
- "2": [],
- }
-
- def lookup_parents(cid):
- return graph[cid]
-
- def lookup_stamp(commit_id):
- # any constant timestamp value here will work to force
- # this test to test the same behaviour as done previously
- return 100
-
- lcas = ["A"]
- others = ["B", "C"]
- for cmt in others:
- next_lcas = []
- for ca in lcas:
- res = _find_lcas(lookup_parents, cmt, [ca], lookup_stamp)
- next_lcas.extend(res)
- lcas = next_lcas[:]
- self.assertEqual(set(lcas), {"2"})
-
-
-class CanFastForwardTests(TestCase):
- def test_ff(self):
- r = MemoryRepo()
- base = make_commit()
- c1 = make_commit(parents=[base.id])
- c2 = make_commit(parents=[c1.id])
- r.object_store.add_objects([(base, None), (c1, None), (c2, None)])
- self.assertTrue(can_fast_forward(r, c1.id, c1.id))
- self.assertTrue(can_fast_forward(r, base.id, c1.id))
- self.assertTrue(can_fast_forward(r, c1.id, c2.id))
- self.assertFalse(can_fast_forward(r, c2.id, c1.id))
-
- def test_diverged(self):
- r = MemoryRepo()
- base = make_commit()
- c1 = make_commit(parents=[base.id])
- c2a = make_commit(parents=[c1.id], message=b"2a")
- c2b = make_commit(parents=[c1.id], message=b"2b")
- r.object_store.add_objects([(base, None), (c1, None), (c2a, None), (c2b, None)])
- self.assertTrue(can_fast_forward(r, c1.id, c2a.id))
- self.assertTrue(can_fast_forward(r, c1.id, c2b.id))
- self.assertFalse(can_fast_forward(r, c2a.id, c2b.id))
- self.assertFalse(can_fast_forward(r, c2b.id, c2a.id))
-
-
-class WorkListTest(TestCase):
- def test_WorkList(self):
- # tuples of (timestamp, value) are stored in a Priority MaxQueue
- # repeated use of get should return them in maxheap timestamp
- # order: largest time value (most recent in time) first then earlier/older
- wlst = WorkList()
- wlst.add((100, "Test Value 1"))
- wlst.add((50, "Test Value 2"))
- wlst.add((200, "Test Value 3"))
- self.assertTrue(wlst.get() == (200, "Test Value 3"))
- self.assertTrue(wlst.get() == (100, "Test Value 1"))
- wlst.add((150, "Test Value 4"))
- self.assertTrue(wlst.get() == (150, "Test Value 4"))
- self.assertTrue(wlst.get() == (50, "Test Value 2"))
blob - 3581f2b533b9edc65773a579d070af304d7e2705 (mode 644)
blob + /dev/null
--- dulwich/tests/test_greenthreads.py
+++ /dev/null
-# test_greenthreads.py -- Unittests for eventlet.
-# Copyright (C) 2013 eNovance SAS <licensing@enovance.com>
-#
-# Author: Fabien Boucher <fabien.boucher@enovance.com>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-import time
-
-from dulwich.tests import TestCase, skipIf
-
-from ..object_store import MemoryObjectStore
-from ..objects import Blob, Commit, Tree, parse_timezone
-
-try:
- import gevent # noqa: F401
-
- gevent_support = True
-except ImportError:
- gevent_support = False
-
-if gevent_support:
- from ..greenthreads import GreenThreadsMissingObjectFinder
-
-skipmsg = "Gevent library is not installed"
-
-
-def create_commit(marker=None):
- blob = Blob.from_string(b"The blob content " + marker)
- tree = Tree()
- tree.add(b"thefile " + marker, 0o100644, blob.id)
- cmt = Commit()
- cmt.tree = tree.id
- cmt.author = cmt.committer = b"John Doe <john@doe.net>"
- cmt.message = marker
- tz = parse_timezone(b"-0200")[0]
- cmt.commit_time = cmt.author_time = int(time.time())
- cmt.commit_timezone = cmt.author_timezone = tz
- return cmt, tree, blob
-
-
-def init_store(store, count=1):
- ret = []
- for i in range(count):
- objs = create_commit(marker=("%d" % i).encode("ascii"))
- for obj in objs:
- ret.append(obj)
- store.add_object(obj)
- return ret
-
-
-@skipIf(not gevent_support, skipmsg)
-class TestGreenThreadsMissingObjectFinder(TestCase):
- def setUp(self):
- super().setUp()
- self.store = MemoryObjectStore()
- self.cmt_amount = 10
- self.objs = init_store(self.store, self.cmt_amount)
-
- def test_finder(self):
- wants = [sha.id for sha in self.objs if isinstance(sha, Commit)]
- finder = GreenThreadsMissingObjectFinder(self.store, (), wants)
- self.assertEqual(len(finder.sha_done), 0)
- self.assertEqual(len(finder.objects_to_send), self.cmt_amount)
-
- finder = GreenThreadsMissingObjectFinder(
- self.store, wants[0 : int(self.cmt_amount / 2)], wants
- )
- # sha_done will contains commit id and sha of blob referred in tree
- self.assertEqual(len(finder.sha_done), (self.cmt_amount / 2) * 2)
- self.assertEqual(len(finder.objects_to_send), self.cmt_amount / 2)
blob - 34517cc5404ddc4254187f2573f817a1083256a7 (mode 644)
blob + /dev/null
--- dulwich/tests/test_hooks.py
+++ /dev/null
-# test_hooks.py -- Tests for executing hooks
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for executing hooks."""
-
-import os
-import shutil
-import stat
-import sys
-import tempfile
-
-from dulwich import errors
-from dulwich.tests import TestCase
-
-from ..hooks import CommitMsgShellHook, PostCommitShellHook, PreCommitShellHook
-
-
-class ShellHookTests(TestCase):
- def setUp(self):
- super().setUp()
- if os.name != "posix":
- self.skipTest("shell hook tests requires POSIX shell")
- self.assertTrue(os.path.exists("/bin/sh"))
-
- def test_hook_pre_commit(self):
- repo_dir = os.path.join(tempfile.mkdtemp())
- os.mkdir(os.path.join(repo_dir, "hooks"))
- self.addCleanup(shutil.rmtree, repo_dir)
-
- pre_commit_fail = """#!/bin/sh
-exit 1
-"""
-
- pre_commit_success = """#!/bin/sh
-exit 0
-"""
- pre_commit_cwd = (
- """#!/bin/sh
-if [ "$(pwd)" != '"""
- + repo_dir
- + """' ]; then
- echo "Expected path '"""
- + repo_dir
- + """', got '$(pwd)'"
- exit 1
-fi
-
-exit 0
-"""
- )
-
- pre_commit = os.path.join(repo_dir, "hooks", "pre-commit")
- hook = PreCommitShellHook(repo_dir, repo_dir)
-
- with open(pre_commit, "w") as f:
- f.write(pre_commit_fail)
- os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
-
- self.assertRaises(errors.HookError, hook.execute)
-
- if sys.platform != "darwin":
- # Don't bother running this test on darwin since path
- # canonicalization messages with our simple string comparison.
- with open(pre_commit, "w") as f:
- f.write(pre_commit_cwd)
- os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
-
- hook.execute()
-
- with open(pre_commit, "w") as f:
- f.write(pre_commit_success)
- os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
-
- hook.execute()
-
- def test_hook_commit_msg(self):
- repo_dir = os.path.join(tempfile.mkdtemp())
- os.mkdir(os.path.join(repo_dir, "hooks"))
- self.addCleanup(shutil.rmtree, repo_dir)
-
- commit_msg_fail = """#!/bin/sh
-exit 1
-"""
-
- commit_msg_success = """#!/bin/sh
-exit 0
-"""
-
- commit_msg_cwd = (
- """#!/bin/sh
-if [ "$(pwd)" = '"""
- + repo_dir
- + "' ]; then exit 0; else exit 1; fi\n"
- )
-
- commit_msg = os.path.join(repo_dir, "hooks", "commit-msg")
- hook = CommitMsgShellHook(repo_dir)
-
- with open(commit_msg, "w") as f:
- f.write(commit_msg_fail)
- os.chmod(commit_msg, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
-
- self.assertRaises(errors.HookError, hook.execute, b"failed commit")
-
- if sys.platform != "darwin":
- # Don't bother running this test on darwin since path
- # canonicalization messages with our simple string comparison.
- with open(commit_msg, "w") as f:
- f.write(commit_msg_cwd)
- os.chmod(commit_msg, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
-
- hook.execute(b"cwd test commit")
-
- with open(commit_msg, "w") as f:
- f.write(commit_msg_success)
- os.chmod(commit_msg, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
-
- hook.execute(b"empty commit")
-
- def test_hook_post_commit(self):
- (fd, path) = tempfile.mkstemp()
- os.close(fd)
-
- repo_dir = os.path.join(tempfile.mkdtemp())
- os.mkdir(os.path.join(repo_dir, "hooks"))
- self.addCleanup(shutil.rmtree, repo_dir)
-
- post_commit_success = (
- """#!/bin/sh
-rm """
- + path
- + "\n"
- )
-
- post_commit_fail = """#!/bin/sh
-exit 1
-"""
-
- post_commit_cwd = (
- """#!/bin/sh
-if [ "$(pwd)" = '"""
- + repo_dir
- + "' ]; then exit 0; else exit 1; fi\n"
- )
-
- post_commit = os.path.join(repo_dir, "hooks", "post-commit")
- hook = PostCommitShellHook(repo_dir)
-
- with open(post_commit, "w") as f:
- f.write(post_commit_fail)
- os.chmod(post_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
-
- self.assertRaises(errors.HookError, hook.execute)
-
- if sys.platform != "darwin":
- # Don't bother running this test on darwin since path
- # canonicalization messages with our simple string comparison.
- with open(post_commit, "w") as f:
- f.write(post_commit_cwd)
- os.chmod(post_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
-
- hook.execute()
-
- with open(post_commit, "w") as f:
- f.write(post_commit_success)
- os.chmod(post_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
-
- hook.execute()
- self.assertFalse(os.path.exists(path))
blob - f9b6b6025149a40b7b5a0c012bd192d6f458ebbf (mode 644)
blob + /dev/null
--- dulwich/tests/test_ignore.py
+++ /dev/null
-# test_ignore.py -- Tests for ignore files.
-# Copyright (C) 2017 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for ignore files."""
-
-import os
-import re
-import shutil
-import tempfile
-from io import BytesIO
-
-from dulwich.tests import TestCase
-
-from ..ignore import (
- IgnoreFilter,
- IgnoreFilterManager,
- IgnoreFilterStack,
- Pattern,
- match_pattern,
- read_ignore_patterns,
- translate,
-)
-from ..repo import Repo
-
-POSITIVE_MATCH_TESTS = [
- (b"foo.c", b"*.c"),
- (b".c", b"*.c"),
- (b"foo/foo.c", b"*.c"),
- (b"foo/foo.c", b"foo.c"),
- (b"foo.c", b"/*.c"),
- (b"foo.c", b"/foo.c"),
- (b"foo.c", b"foo.c"),
- (b"foo.c", b"foo.[ch]"),
- (b"foo/bar/bla.c", b"foo/**"),
- (b"foo/bar/bla/blie.c", b"foo/**/blie.c"),
- (b"foo/bar/bla.c", b"**/bla.c"),
- (b"bla.c", b"**/bla.c"),
- (b"foo/bar", b"foo/**/bar"),
- (b"foo/bla/bar", b"foo/**/bar"),
- (b"foo/bar/", b"bar/"),
- (b"foo/bar/", b"bar"),
- (b"foo/bar/something", b"foo/bar/*"),
-]
-
-NEGATIVE_MATCH_TESTS = [
- (b"foo.c", b"foo.[dh]"),
- (b"foo/foo.c", b"/foo.c"),
- (b"foo/foo.c", b"/*.c"),
- (b"foo/bar/", b"/bar/"),
- (b"foo/bar/", b"foo/bar/*"),
- (b"foo/bar", b"foo?bar"),
-]
-
-
-TRANSLATE_TESTS = [
- (b"*.c", b"(?ms)(.*/)?[^/]*\\.c/?\\Z"),
- (b"foo.c", b"(?ms)(.*/)?foo\\.c/?\\Z"),
- (b"/*.c", b"(?ms)[^/]*\\.c/?\\Z"),
- (b"/foo.c", b"(?ms)foo\\.c/?\\Z"),
- (b"foo.c", b"(?ms)(.*/)?foo\\.c/?\\Z"),
- (b"foo.[ch]", b"(?ms)(.*/)?foo\\.[ch]/?\\Z"),
- (b"bar/", b"(?ms)(.*/)?bar\\/\\Z"),
- (b"foo/**", b"(?ms)foo(/.*)?/?\\Z"),
- (b"foo/**/blie.c", b"(?ms)foo(/.*)?\\/blie\\.c/?\\Z"),
- (b"**/bla.c", b"(?ms)(.*/)?bla\\.c/?\\Z"),
- (b"foo/**/bar", b"(?ms)foo(/.*)?\\/bar/?\\Z"),
- (b"foo/bar/*", b"(?ms)foo\\/bar\\/[^/]+/?\\Z"),
- (b"/foo\\[bar\\]", b"(?ms)foo\\[bar\\]/?\\Z"),
- (b"/foo[bar]", b"(?ms)foo[bar]/?\\Z"),
- (b"/foo[0-9]", b"(?ms)foo[0-9]/?\\Z"),
-]
-
-
-class TranslateTests(TestCase):
- def test_translate(self):
- for pattern, regex in TRANSLATE_TESTS:
- if re.escape(b"/") == b"/":
- # Slash is no longer escaped in Python3.7, so undo the escaping
- # in the expected return value..
- regex = regex.replace(b"\\/", b"/")
- self.assertEqual(
- regex,
- translate(pattern),
- f"orig pattern: {pattern!r}, regex: {translate(pattern)!r}, expected: {regex!r}",
- )
-
-
-class ReadIgnorePatterns(TestCase):
- def test_read_file(self):
- f = BytesIO(
- b"""
-# a comment
-\x20\x20
-# and an empty line:
-
-\\#not a comment
-!negative
-with trailing whitespace
-with escaped trailing whitespace\\
-"""
- )
- self.assertEqual(
- list(read_ignore_patterns(f)),
- [
- b"\\#not a comment",
- b"!negative",
- b"with trailing whitespace",
- b"with escaped trailing whitespace ",
- ],
- )
-
-
-class MatchPatternTests(TestCase):
- def test_matches(self):
- for path, pattern in POSITIVE_MATCH_TESTS:
- self.assertTrue(
- match_pattern(path, pattern),
- f"path: {path!r}, pattern: {pattern!r}",
- )
-
- def test_no_matches(self):
- for path, pattern in NEGATIVE_MATCH_TESTS:
- self.assertFalse(
- match_pattern(path, pattern),
- f"path: {path!r}, pattern: {pattern!r}",
- )
-
-
-class IgnoreFilterTests(TestCase):
- def test_included(self):
- filter = IgnoreFilter([b"a.c", b"b.c"])
- self.assertTrue(filter.is_ignored(b"a.c"))
- self.assertIs(None, filter.is_ignored(b"c.c"))
- self.assertEqual([Pattern(b"a.c")], list(filter.find_matching(b"a.c")))
- self.assertEqual([], list(filter.find_matching(b"c.c")))
-
- def test_included_ignorecase(self):
- filter = IgnoreFilter([b"a.c", b"b.c"], ignorecase=False)
- self.assertTrue(filter.is_ignored(b"a.c"))
- self.assertFalse(filter.is_ignored(b"A.c"))
- filter = IgnoreFilter([b"a.c", b"b.c"], ignorecase=True)
- self.assertTrue(filter.is_ignored(b"a.c"))
- self.assertTrue(filter.is_ignored(b"A.c"))
- self.assertTrue(filter.is_ignored(b"A.C"))
-
- def test_excluded(self):
- filter = IgnoreFilter([b"a.c", b"b.c", b"!c.c"])
- self.assertFalse(filter.is_ignored(b"c.c"))
- self.assertIs(None, filter.is_ignored(b"d.c"))
- self.assertEqual([Pattern(b"!c.c")], list(filter.find_matching(b"c.c")))
- self.assertEqual([], list(filter.find_matching(b"d.c")))
-
- def test_include_exclude_include(self):
- filter = IgnoreFilter([b"a.c", b"!a.c", b"a.c"])
- self.assertTrue(filter.is_ignored(b"a.c"))
- self.assertEqual(
- [Pattern(b"a.c"), Pattern(b"!a.c"), Pattern(b"a.c")],
- list(filter.find_matching(b"a.c")),
- )
-
- def test_manpage(self):
- # A specific example from the gitignore manpage
- filter = IgnoreFilter([b"/*", b"!/foo", b"/foo/*", b"!/foo/bar"])
- self.assertTrue(filter.is_ignored(b"a.c"))
- self.assertTrue(filter.is_ignored(b"foo/blie"))
- self.assertFalse(filter.is_ignored(b"foo"))
- self.assertFalse(filter.is_ignored(b"foo/bar"))
- self.assertFalse(filter.is_ignored(b"foo/bar/"))
- self.assertFalse(filter.is_ignored(b"foo/bar/bloe"))
-
- def test_regex_special(self):
- # See https://github.com/dulwich/dulwich/issues/930#issuecomment-1026166429
- filter = IgnoreFilter([b"/foo\\[bar\\]", b"/foo"])
- self.assertTrue(filter.is_ignored("foo"))
- self.assertTrue(filter.is_ignored("foo[bar]"))
-
-
-class IgnoreFilterStackTests(TestCase):
- def test_stack_first(self):
- filter1 = IgnoreFilter([b"[a].c", b"[b].c", b"![d].c"])
- filter2 = IgnoreFilter([b"[a].c", b"![b],c", b"[c].c", b"[d].c"])
- stack = IgnoreFilterStack([filter1, filter2])
- self.assertIs(True, stack.is_ignored(b"a.c"))
- self.assertIs(True, stack.is_ignored(b"b.c"))
- self.assertIs(True, stack.is_ignored(b"c.c"))
- self.assertIs(False, stack.is_ignored(b"d.c"))
- self.assertIs(None, stack.is_ignored(b"e.c"))
-
-
-class IgnoreFilterManagerTests(TestCase):
- def test_load_ignore(self):
- tmp_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- repo = Repo.init(tmp_dir)
- with open(os.path.join(repo.path, ".gitignore"), "wb") as f:
- f.write(b"/foo/bar\n")
- f.write(b"/dir2\n")
- f.write(b"/dir3/\n")
- os.mkdir(os.path.join(repo.path, "dir"))
- with open(os.path.join(repo.path, "dir", ".gitignore"), "wb") as f:
- f.write(b"/blie\n")
- with open(os.path.join(repo.path, "dir", "blie"), "wb") as f:
- f.write(b"IGNORED")
- p = os.path.join(repo.controldir(), "info", "exclude")
- with open(p, "wb") as f:
- f.write(b"/excluded\n")
- m = IgnoreFilterManager.from_repo(repo)
- self.assertTrue(m.is_ignored("dir/blie"))
- self.assertIs(None, m.is_ignored(os.path.join("dir", "bloe")))
- self.assertIs(None, m.is_ignored("dir"))
- self.assertTrue(m.is_ignored(os.path.join("foo", "bar")))
- self.assertTrue(m.is_ignored(os.path.join("excluded")))
- self.assertTrue(m.is_ignored(os.path.join("dir2", "fileinignoreddir")))
- self.assertFalse(m.is_ignored("dir3"))
- self.assertTrue(m.is_ignored("dir3/"))
- self.assertTrue(m.is_ignored("dir3/bla"))
-
- def test_nested_gitignores(self):
- tmp_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- repo = Repo.init(tmp_dir)
-
- with open(os.path.join(repo.path, ".gitignore"), "wb") as f:
- f.write(b"/*\n")
- f.write(b"!/foo\n")
-
- os.mkdir(os.path.join(repo.path, "foo"))
- with open(os.path.join(repo.path, "foo", ".gitignore"), "wb") as f:
- f.write(b"/bar\n")
-
- with open(os.path.join(repo.path, "foo", "bar"), "wb") as f:
- f.write(b"IGNORED")
-
- m = IgnoreFilterManager.from_repo(repo)
- self.assertTrue(m.is_ignored("foo/bar"))
-
- def test_load_ignore_ignorecase(self):
- tmp_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- repo = Repo.init(tmp_dir)
- config = repo.get_config()
- config.set(b"core", b"ignorecase", True)
- config.write_to_path()
- with open(os.path.join(repo.path, ".gitignore"), "wb") as f:
- f.write(b"/foo/bar\n")
- f.write(b"/dir\n")
- m = IgnoreFilterManager.from_repo(repo)
- self.assertTrue(m.is_ignored(os.path.join("dir", "blie")))
- self.assertTrue(m.is_ignored(os.path.join("DIR", "blie")))
-
- def test_ignored_contents(self):
- tmp_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- repo = Repo.init(tmp_dir)
- with open(os.path.join(repo.path, ".gitignore"), "wb") as f:
- f.write(b"a/*\n")
- f.write(b"!a/*.txt\n")
- m = IgnoreFilterManager.from_repo(repo)
- os.mkdir(os.path.join(repo.path, "a"))
- self.assertIs(None, m.is_ignored("a"))
- self.assertIs(None, m.is_ignored("a/"))
- self.assertFalse(m.is_ignored("a/b.txt"))
- self.assertTrue(m.is_ignored("a/c.dat"))
blob - 28c041dc32a47d237ab85f5419a85376ff4e8102 (mode 644)
blob + /dev/null
--- dulwich/tests/test_index.py
+++ /dev/null
-# test_index.py -- Tests for the git index
-# Copyright (C) 2008-2009 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for the index."""
-
-import os
-import shutil
-import stat
-import struct
-import sys
-import tempfile
-from io import BytesIO
-
-from dulwich.tests import TestCase, skipIf
-
-from ..index import (
- Index,
- IndexEntry,
- SerializedIndexEntry,
- _fs_to_tree_path,
- _tree_to_fs_path,
- build_index_from_tree,
- cleanup_mode,
- commit_tree,
- get_unstaged_changes,
- index_entry_from_stat,
- read_index,
- read_index_dict,
- validate_path_element_default,
- validate_path_element_ntfs,
- write_cache_time,
- write_index,
- write_index_dict,
-)
-from ..object_store import MemoryObjectStore
-from ..objects import S_IFGITLINK, Blob, Commit, Tree
-from ..repo import Repo
-
-
-def can_symlink():
- """Return whether running process can create symlinks."""
- if sys.platform != "win32":
- # Platforms other than Windows should allow symlinks without issues.
- return True
-
- test_source = tempfile.mkdtemp()
- test_target = test_source + "can_symlink"
- try:
- os.symlink(test_source, test_target)
- except (NotImplementedError, OSError):
- return False
- return True
-
-
-class IndexTestCase(TestCase):
- datadir = os.path.join(os.path.dirname(__file__), "../../testdata/indexes")
-
- def get_simple_index(self, name):
- return Index(os.path.join(self.datadir, name))
-
-
-class SimpleIndexTestCase(IndexTestCase):
- def test_len(self):
- self.assertEqual(1, len(self.get_simple_index("index")))
-
- def test_iter(self):
- self.assertEqual([b"bla"], list(self.get_simple_index("index")))
-
- def test_iterobjects(self):
- self.assertEqual(
- [(b"bla", b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", 33188)],
- list(self.get_simple_index("index").iterobjects()),
- )
-
- def test_getitem(self):
- self.assertEqual(
- IndexEntry(
- (1230680220, 0),
- (1230680220, 0),
- 2050,
- 3761020,
- 33188,
- 1000,
- 1000,
- 0,
- b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
- ),
- self.get_simple_index("index")[b"bla"],
- )
-
- def test_empty(self):
- i = self.get_simple_index("notanindex")
- self.assertEqual(0, len(i))
- self.assertFalse(os.path.exists(i._filename))
-
- def test_against_empty_tree(self):
- i = self.get_simple_index("index")
- changes = list(i.changes_from_tree(MemoryObjectStore(), None))
- self.assertEqual(1, len(changes))
- (oldname, newname), (oldmode, newmode), (oldsha, newsha) = changes[0]
- self.assertEqual(b"bla", newname)
- self.assertEqual(b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", newsha)
-
-
-class SimpleIndexWriterTestCase(IndexTestCase):
- def setUp(self):
- IndexTestCase.setUp(self)
- self.tempdir = tempfile.mkdtemp()
-
- def tearDown(self):
- IndexTestCase.tearDown(self)
- shutil.rmtree(self.tempdir)
-
- def test_simple_write(self):
- entries = [
- (
- SerializedIndexEntry(
- b"barbla",
- (1230680220, 0),
- (1230680220, 0),
- 2050,
- 3761020,
- 33188,
- 1000,
- 1000,
- 0,
- b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
- 0,
- 0,
- )
- )
- ]
- filename = os.path.join(self.tempdir, "test-simple-write-index")
- with open(filename, "wb+") as x:
- write_index(x, entries)
-
- with open(filename, "rb") as x:
- self.assertEqual(entries, list(read_index(x)))
-
-
-class ReadIndexDictTests(IndexTestCase):
- def setUp(self):
- IndexTestCase.setUp(self)
- self.tempdir = tempfile.mkdtemp()
-
- def tearDown(self):
- IndexTestCase.tearDown(self)
- shutil.rmtree(self.tempdir)
-
- def test_simple_write(self):
- entries = {
- b"barbla": IndexEntry(
- (1230680220, 0),
- (1230680220, 0),
- 2050,
- 3761020,
- 33188,
- 1000,
- 1000,
- 0,
- b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
- )
- }
- filename = os.path.join(self.tempdir, "test-simple-write-index")
- with open(filename, "wb+") as x:
- write_index_dict(x, entries)
-
- with open(filename, "rb") as x:
- self.assertEqual(entries, read_index_dict(x))
-
-
-class CommitTreeTests(TestCase):
- def setUp(self):
- super().setUp()
- self.store = MemoryObjectStore()
-
- def test_single_blob(self):
- blob = Blob()
- blob.data = b"foo"
- self.store.add_object(blob)
- blobs = [(b"bla", blob.id, stat.S_IFREG)]
- rootid = commit_tree(self.store, blobs)
- self.assertEqual(rootid, b"1a1e80437220f9312e855c37ac4398b68e5c1d50")
- self.assertEqual((stat.S_IFREG, blob.id), self.store[rootid][b"bla"])
- self.assertEqual({rootid, blob.id}, set(self.store._data.keys()))
-
- def test_nested(self):
- blob = Blob()
- blob.data = b"foo"
- self.store.add_object(blob)
- blobs = [(b"bla/bar", blob.id, stat.S_IFREG)]
- rootid = commit_tree(self.store, blobs)
- self.assertEqual(rootid, b"d92b959b216ad0d044671981196781b3258fa537")
- dirid = self.store[rootid][b"bla"][1]
- self.assertEqual(dirid, b"c1a1deb9788150829579a8b4efa6311e7b638650")
- self.assertEqual((stat.S_IFDIR, dirid), self.store[rootid][b"bla"])
- self.assertEqual((stat.S_IFREG, blob.id), self.store[dirid][b"bar"])
- self.assertEqual({rootid, dirid, blob.id}, set(self.store._data.keys()))
-
-
-class CleanupModeTests(TestCase):
- def assertModeEqual(self, expected, got):
- self.assertEqual(expected, got, f"{expected:o} != {got:o}")
-
- def test_file(self):
- self.assertModeEqual(0o100644, cleanup_mode(0o100000))
-
- def test_executable(self):
- self.assertModeEqual(0o100755, cleanup_mode(0o100711))
- self.assertModeEqual(0o100755, cleanup_mode(0o100700))
-
- def test_symlink(self):
- self.assertModeEqual(0o120000, cleanup_mode(0o120711))
-
- def test_dir(self):
- self.assertModeEqual(0o040000, cleanup_mode(0o40531))
-
- def test_submodule(self):
- self.assertModeEqual(0o160000, cleanup_mode(0o160744))
-
-
-class WriteCacheTimeTests(TestCase):
- def test_write_string(self):
- f = BytesIO()
- self.assertRaises(TypeError, write_cache_time, f, "foo")
-
- def test_write_int(self):
- f = BytesIO()
- write_cache_time(f, 434343)
- self.assertEqual(struct.pack(">LL", 434343, 0), f.getvalue())
-
- def test_write_tuple(self):
- f = BytesIO()
- write_cache_time(f, (434343, 21))
- self.assertEqual(struct.pack(">LL", 434343, 21), f.getvalue())
-
- def test_write_float(self):
- f = BytesIO()
- write_cache_time(f, 434343.000000021)
- self.assertEqual(struct.pack(">LL", 434343, 21), f.getvalue())
-
-
-class IndexEntryFromStatTests(TestCase):
- def test_simple(self):
- st = os.stat_result(
- (
- 16877,
- 131078,
- 64769,
- 154,
- 1000,
- 1000,
- 12288,
- 1323629595,
- 1324180496,
- 1324180496,
- )
- )
- entry = index_entry_from_stat(st, b"22" * 20)
- self.assertEqual(
- entry,
- IndexEntry(
- 1324180496,
- 1324180496,
- 64769,
- 131078,
- 16384,
- 1000,
- 1000,
- 12288,
- b"2222222222222222222222222222222222222222",
- ),
- )
-
- def test_override_mode(self):
- st = os.stat_result(
- (
- stat.S_IFREG + 0o644,
- 131078,
- 64769,
- 154,
- 1000,
- 1000,
- 12288,
- 1323629595,
- 1324180496,
- 1324180496,
- )
- )
- entry = index_entry_from_stat(st, b"22" * 20, mode=stat.S_IFREG + 0o755)
- self.assertEqual(
- entry,
- IndexEntry(
- 1324180496,
- 1324180496,
- 64769,
- 131078,
- 33261,
- 1000,
- 1000,
- 12288,
- b"2222222222222222222222222222222222222222",
- ),
- )
-
-
-class BuildIndexTests(TestCase):
- def assertReasonableIndexEntry(self, index_entry, mode, filesize, sha):
- self.assertEqual(index_entry.mode, mode) # mode
- self.assertEqual(index_entry.size, filesize) # filesize
- self.assertEqual(index_entry.sha, sha) # sha
-
- def assertFileContents(self, path, contents, symlink=False):
- if symlink:
- self.assertEqual(os.readlink(path), contents)
- else:
- with open(path, "rb") as f:
- self.assertEqual(f.read(), contents)
-
- def test_empty(self):
- repo_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, repo_dir)
- with Repo.init(repo_dir) as repo:
- tree = Tree()
- repo.object_store.add_object(tree)
-
- build_index_from_tree(
- repo.path, repo.index_path(), repo.object_store, tree.id
- )
-
- # Verify index entries
- index = repo.open_index()
- self.assertEqual(len(index), 0)
-
- # Verify no files
- self.assertEqual([".git"], os.listdir(repo.path))
-
- def test_git_dir(self):
- repo_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, repo_dir)
- with Repo.init(repo_dir) as repo:
- # Populate repo
- filea = Blob.from_string(b"file a")
- filee = Blob.from_string(b"d")
-
- tree = Tree()
- tree[b".git/a"] = (stat.S_IFREG | 0o644, filea.id)
- tree[b"c/e"] = (stat.S_IFREG | 0o644, filee.id)
-
- repo.object_store.add_objects([(o, None) for o in [filea, filee, tree]])
-
- build_index_from_tree(
- repo.path, repo.index_path(), repo.object_store, tree.id
- )
-
- # Verify index entries
- index = repo.open_index()
- self.assertEqual(len(index), 1)
-
- # filea
- apath = os.path.join(repo.path, ".git", "a")
- self.assertFalse(os.path.exists(apath))
-
- # filee
- epath = os.path.join(repo.path, "c", "e")
- self.assertTrue(os.path.exists(epath))
- self.assertReasonableIndexEntry(
- index[b"c/e"], stat.S_IFREG | 0o644, 1, filee.id
- )
- self.assertFileContents(epath, b"d")
-
- def test_nonempty(self):
- repo_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, repo_dir)
- with Repo.init(repo_dir) as repo:
- # Populate repo
- filea = Blob.from_string(b"file a")
- fileb = Blob.from_string(b"file b")
- filed = Blob.from_string(b"file d")
-
- tree = Tree()
- tree[b"a"] = (stat.S_IFREG | 0o644, filea.id)
- tree[b"b"] = (stat.S_IFREG | 0o644, fileb.id)
- tree[b"c/d"] = (stat.S_IFREG | 0o644, filed.id)
-
- repo.object_store.add_objects(
- [(o, None) for o in [filea, fileb, filed, tree]]
- )
-
- build_index_from_tree(
- repo.path, repo.index_path(), repo.object_store, tree.id
- )
-
- # Verify index entries
- index = repo.open_index()
- self.assertEqual(len(index), 3)
-
- # filea
- apath = os.path.join(repo.path, "a")
- self.assertTrue(os.path.exists(apath))
- self.assertReasonableIndexEntry(
- index[b"a"], stat.S_IFREG | 0o644, 6, filea.id
- )
- self.assertFileContents(apath, b"file a")
-
- # fileb
- bpath = os.path.join(repo.path, "b")
- self.assertTrue(os.path.exists(bpath))
- self.assertReasonableIndexEntry(
- index[b"b"], stat.S_IFREG | 0o644, 6, fileb.id
- )
- self.assertFileContents(bpath, b"file b")
-
- # filed
- dpath = os.path.join(repo.path, "c", "d")
- self.assertTrue(os.path.exists(dpath))
- self.assertReasonableIndexEntry(
- index[b"c/d"], stat.S_IFREG | 0o644, 6, filed.id
- )
- self.assertFileContents(dpath, b"file d")
-
- # Verify no extra files
- self.assertEqual([".git", "a", "b", "c"], sorted(os.listdir(repo.path)))
- self.assertEqual(["d"], sorted(os.listdir(os.path.join(repo.path, "c"))))
-
- @skipIf(not getattr(os, "sync", None), "Requires sync support")
- def test_norewrite(self):
- repo_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, repo_dir)
- with Repo.init(repo_dir) as repo:
- # Populate repo
- filea = Blob.from_string(b"file a")
- filea_path = os.path.join(repo_dir, "a")
- tree = Tree()
- tree[b"a"] = (stat.S_IFREG | 0o644, filea.id)
-
- repo.object_store.add_objects([(o, None) for o in [filea, tree]])
-
- # First Write
- build_index_from_tree(
- repo.path, repo.index_path(), repo.object_store, tree.id
- )
- # Use sync as metadata can be cached on some FS
- os.sync()
- mtime = os.stat(filea_path).st_mtime
-
- # Test Rewrite
- build_index_from_tree(
- repo.path, repo.index_path(), repo.object_store, tree.id
- )
- os.sync()
- self.assertEqual(mtime, os.stat(filea_path).st_mtime)
-
- # Modify content
- with open(filea_path, "wb") as fh:
- fh.write(b"test a")
- os.sync()
- mtime = os.stat(filea_path).st_mtime
-
- # Test rewrite
- build_index_from_tree(
- repo.path, repo.index_path(), repo.object_store, tree.id
- )
- os.sync()
- with open(filea_path, "rb") as fh:
- self.assertEqual(b"file a", fh.read())
-
- @skipIf(not can_symlink(), "Requires symlink support")
- def test_symlink(self):
- repo_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, repo_dir)
- with Repo.init(repo_dir) as repo:
- # Populate repo
- filed = Blob.from_string(b"file d")
- filee = Blob.from_string(b"d")
-
- tree = Tree()
- tree[b"c/d"] = (stat.S_IFREG | 0o644, filed.id)
- tree[b"c/e"] = (stat.S_IFLNK, filee.id) # symlink
-
- repo.object_store.add_objects([(o, None) for o in [filed, filee, tree]])
-
- build_index_from_tree(
- repo.path, repo.index_path(), repo.object_store, tree.id
- )
-
- # Verify index entries
- index = repo.open_index()
-
- # symlink to d
- epath = os.path.join(repo.path, "c", "e")
- self.assertTrue(os.path.exists(epath))
- self.assertReasonableIndexEntry(
- index[b"c/e"],
- stat.S_IFLNK,
- 0 if sys.platform == "win32" else 1,
- filee.id,
- )
- self.assertFileContents(epath, "d", symlink=True)
-
- def test_no_decode_encode(self):
- repo_dir = tempfile.mkdtemp()
- repo_dir_bytes = os.fsencode(repo_dir)
- self.addCleanup(shutil.rmtree, repo_dir)
- with Repo.init(repo_dir) as repo:
- # Populate repo
- file = Blob.from_string(b"foo")
-
- tree = Tree()
- latin1_name = "À".encode("latin1")
- latin1_path = os.path.join(repo_dir_bytes, latin1_name)
- utf8_name = "À".encode()
- utf8_path = os.path.join(repo_dir_bytes, utf8_name)
- tree[latin1_name] = (stat.S_IFREG | 0o644, file.id)
- tree[utf8_name] = (stat.S_IFREG | 0o644, file.id)
-
- repo.object_store.add_objects([(o, None) for o in [file, tree]])
-
- try:
- build_index_from_tree(
- repo.path, repo.index_path(), repo.object_store, tree.id
- )
- except OSError as e:
- if e.errno == 92 and sys.platform == "darwin":
- # Our filename isn't supported by the platform :(
- self.skipTest("can not write filename %r" % e.filename)
- else:
- raise
- except UnicodeDecodeError:
- # This happens e.g. with python3.6 on Windows.
- # It implicitly decodes using utf8, which doesn't work.
- self.skipTest("can not implicitly convert as utf8")
-
- # Verify index entries
- index = repo.open_index()
- self.assertIn(latin1_name, index)
- self.assertIn(utf8_name, index)
-
- self.assertTrue(os.path.exists(latin1_path))
-
- self.assertTrue(os.path.exists(utf8_path))
-
- def test_git_submodule(self):
- repo_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, repo_dir)
- with Repo.init(repo_dir) as repo:
- filea = Blob.from_string(b"file alalala")
-
- subtree = Tree()
- subtree[b"a"] = (stat.S_IFREG | 0o644, filea.id)
-
- c = Commit()
- c.tree = subtree.id
- c.committer = c.author = b"Somebody <somebody@example.com>"
- c.commit_time = c.author_time = 42342
- c.commit_timezone = c.author_timezone = 0
- c.parents = []
- c.message = b"Subcommit"
-
- tree = Tree()
- tree[b"c"] = (S_IFGITLINK, c.id)
-
- repo.object_store.add_objects([(o, None) for o in [tree]])
-
- build_index_from_tree(
- repo.path, repo.index_path(), repo.object_store, tree.id
- )
-
- # Verify index entries
- index = repo.open_index()
- self.assertEqual(len(index), 1)
-
- # filea
- apath = os.path.join(repo.path, "c/a")
- self.assertFalse(os.path.exists(apath))
-
- # dir c
- cpath = os.path.join(repo.path, "c")
- self.assertTrue(os.path.isdir(cpath))
- self.assertEqual(index[b"c"].mode, S_IFGITLINK) # mode
- self.assertEqual(index[b"c"].sha, c.id) # sha
-
- def test_git_submodule_exists(self):
- repo_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, repo_dir)
- with Repo.init(repo_dir) as repo:
- filea = Blob.from_string(b"file alalala")
-
- subtree = Tree()
- subtree[b"a"] = (stat.S_IFREG | 0o644, filea.id)
-
- c = Commit()
- c.tree = subtree.id
- c.committer = c.author = b"Somebody <somebody@example.com>"
- c.commit_time = c.author_time = 42342
- c.commit_timezone = c.author_timezone = 0
- c.parents = []
- c.message = b"Subcommit"
-
- tree = Tree()
- tree[b"c"] = (S_IFGITLINK, c.id)
-
- os.mkdir(os.path.join(repo_dir, "c"))
- repo.object_store.add_objects([(o, None) for o in [tree]])
-
- build_index_from_tree(
- repo.path, repo.index_path(), repo.object_store, tree.id
- )
-
- # Verify index entries
- index = repo.open_index()
- self.assertEqual(len(index), 1)
-
- # filea
- apath = os.path.join(repo.path, "c/a")
- self.assertFalse(os.path.exists(apath))
-
- # dir c
- cpath = os.path.join(repo.path, "c")
- self.assertTrue(os.path.isdir(cpath))
- self.assertEqual(index[b"c"].mode, S_IFGITLINK) # mode
- self.assertEqual(index[b"c"].sha, c.id) # sha
-
-
-class GetUnstagedChangesTests(TestCase):
- def test_get_unstaged_changes(self):
- """Unit test for get_unstaged_changes."""
- repo_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, repo_dir)
- with Repo.init(repo_dir) as repo:
- # Commit a dummy file then modify it
- foo1_fullpath = os.path.join(repo_dir, "foo1")
- with open(foo1_fullpath, "wb") as f:
- f.write(b"origstuff")
-
- foo2_fullpath = os.path.join(repo_dir, "foo2")
- with open(foo2_fullpath, "wb") as f:
- f.write(b"origstuff")
-
- repo.stage(["foo1", "foo2"])
- repo.do_commit(
- b"test status",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- with open(foo1_fullpath, "wb") as f:
- f.write(b"newstuff")
-
- # modify access and modify time of path
- os.utime(foo1_fullpath, (0, 0))
-
- changes = get_unstaged_changes(repo.open_index(), repo_dir)
-
- self.assertEqual(list(changes), [b"foo1"])
-
- def test_get_unstaged_deleted_changes(self):
- """Unit test for get_unstaged_changes."""
- repo_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, repo_dir)
- with Repo.init(repo_dir) as repo:
- # Commit a dummy file then remove it
- foo1_fullpath = os.path.join(repo_dir, "foo1")
- with open(foo1_fullpath, "wb") as f:
- f.write(b"origstuff")
-
- repo.stage(["foo1"])
- repo.do_commit(
- b"test status",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- os.unlink(foo1_fullpath)
-
- changes = get_unstaged_changes(repo.open_index(), repo_dir)
-
- self.assertEqual(list(changes), [b"foo1"])
-
- def test_get_unstaged_changes_removed_replaced_by_directory(self):
- """Unit test for get_unstaged_changes."""
- repo_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, repo_dir)
- with Repo.init(repo_dir) as repo:
- # Commit a dummy file then modify it
- foo1_fullpath = os.path.join(repo_dir, "foo1")
- with open(foo1_fullpath, "wb") as f:
- f.write(b"origstuff")
-
- repo.stage(["foo1"])
- repo.do_commit(
- b"test status",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- os.remove(foo1_fullpath)
- os.mkdir(foo1_fullpath)
-
- changes = get_unstaged_changes(repo.open_index(), repo_dir)
-
- self.assertEqual(list(changes), [b"foo1"])
-
- @skipIf(not can_symlink(), "Requires symlink support")
- def test_get_unstaged_changes_removed_replaced_by_link(self):
- """Unit test for get_unstaged_changes."""
- repo_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, repo_dir)
- with Repo.init(repo_dir) as repo:
- # Commit a dummy file then modify it
- foo1_fullpath = os.path.join(repo_dir, "foo1")
- with open(foo1_fullpath, "wb") as f:
- f.write(b"origstuff")
-
- repo.stage(["foo1"])
- repo.do_commit(
- b"test status",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- os.remove(foo1_fullpath)
- os.symlink(os.path.dirname(foo1_fullpath), foo1_fullpath)
-
- changes = get_unstaged_changes(repo.open_index(), repo_dir)
-
- self.assertEqual(list(changes), [b"foo1"])
-
-
-class TestValidatePathElement(TestCase):
- def test_default(self):
- self.assertTrue(validate_path_element_default(b"bla"))
- self.assertTrue(validate_path_element_default(b".bla"))
- self.assertFalse(validate_path_element_default(b".git"))
- self.assertFalse(validate_path_element_default(b".giT"))
- self.assertFalse(validate_path_element_default(b".."))
- self.assertTrue(validate_path_element_default(b"git~1"))
-
- def test_ntfs(self):
- self.assertTrue(validate_path_element_ntfs(b"bla"))
- self.assertTrue(validate_path_element_ntfs(b".bla"))
- self.assertFalse(validate_path_element_ntfs(b".git"))
- self.assertFalse(validate_path_element_ntfs(b".giT"))
- self.assertFalse(validate_path_element_ntfs(b".."))
- self.assertFalse(validate_path_element_ntfs(b"git~1"))
-
-
-class TestTreeFSPathConversion(TestCase):
- def test_tree_to_fs_path(self):
- tree_path = "délwíçh/foo".encode()
- fs_path = _tree_to_fs_path(b"/prefix/path", tree_path)
- self.assertEqual(
- fs_path,
- os.fsencode(os.path.join("/prefix/path", "délwíçh", "foo")),
- )
-
- def test_fs_to_tree_path_str(self):
- fs_path = os.path.join(os.path.join("délwíçh", "foo"))
- tree_path = _fs_to_tree_path(fs_path)
- self.assertEqual(tree_path, "délwíçh/foo".encode())
-
- def test_fs_to_tree_path_bytes(self):
- fs_path = os.path.join(os.fsencode(os.path.join("délwíçh", "foo")))
- tree_path = _fs_to_tree_path(fs_path)
- self.assertEqual(tree_path, "délwíçh/foo".encode())
blob - f2720684651af0198131cab4fdccfb4dff6687c8 (mode 644)
blob + /dev/null
--- dulwich/tests/test_lfs.py
+++ /dev/null
-# test_lfs.py -- tests for LFS
-# Copyright (C) 2020 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for LFS support."""
-
-import shutil
-import tempfile
-
-from ..lfs import LFSStore
-from . import TestCase
-
-
-class LFSTests(TestCase):
- def setUp(self):
- super().setUp()
- self.test_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.test_dir)
- self.lfs = LFSStore.create(self.test_dir)
-
- def test_create(self):
- sha = self.lfs.write_object([b"a", b"b"])
- with self.lfs.open_object(sha) as f:
- self.assertEqual(b"ab", f.read())
-
- def test_missing(self):
- self.assertRaises(KeyError, self.lfs.open_object, "abcdeabcdeabcdeabcde")
blob - 9eb2a1cf9e2139ab43e11a2c3ba805d90eafc2bf (mode 644)
blob + /dev/null
--- dulwich/tests/test_line_ending.py
+++ /dev/null
-# test_line_ending.py -- Tests for the line ending functions
-# Copyright (C) 2018-2019 Boris Feld <boris.feld@comet.ml>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for the line ending conversion."""
-
-from dulwich.tests import TestCase
-
-from ..line_ending import (
- convert_crlf_to_lf,
- convert_lf_to_crlf,
- get_checkin_filter_autocrlf,
- get_checkout_filter_autocrlf,
- normalize_blob,
-)
-from ..objects import Blob
-
-
-class LineEndingConversion(TestCase):
- """Test the line ending conversion functions in various cases."""
-
- def test_convert_crlf_to_lf_no_op(self):
- self.assertEqual(convert_crlf_to_lf(b"foobar"), b"foobar")
-
- def test_convert_crlf_to_lf(self):
- self.assertEqual(convert_crlf_to_lf(b"line1\r\nline2"), b"line1\nline2")
-
- def test_convert_crlf_to_lf_mixed(self):
- self.assertEqual(convert_crlf_to_lf(b"line1\r\n\nline2"), b"line1\n\nline2")
-
- def test_convert_lf_to_crlf_no_op(self):
- self.assertEqual(convert_lf_to_crlf(b"foobar"), b"foobar")
-
- def test_convert_lf_to_crlf(self):
- self.assertEqual(convert_lf_to_crlf(b"line1\nline2"), b"line1\r\nline2")
-
- def test_convert_lf_to_crlf_mixed(self):
- self.assertEqual(convert_lf_to_crlf(b"line1\r\n\nline2"), b"line1\r\n\r\nline2")
-
-
-class GetLineEndingAutocrlfFilters(TestCase):
- def test_get_checkin_filter_autocrlf_default(self):
- checkin_filter = get_checkin_filter_autocrlf(b"false")
-
- self.assertEqual(checkin_filter, None)
-
- def test_get_checkin_filter_autocrlf_true(self):
- checkin_filter = get_checkin_filter_autocrlf(b"true")
-
- self.assertEqual(checkin_filter, convert_crlf_to_lf)
-
- def test_get_checkin_filter_autocrlf_input(self):
- checkin_filter = get_checkin_filter_autocrlf(b"input")
-
- self.assertEqual(checkin_filter, convert_crlf_to_lf)
-
- def test_get_checkout_filter_autocrlf_default(self):
- checkout_filter = get_checkout_filter_autocrlf(b"false")
-
- self.assertEqual(checkout_filter, None)
-
- def test_get_checkout_filter_autocrlf_true(self):
- checkout_filter = get_checkout_filter_autocrlf(b"true")
-
- self.assertEqual(checkout_filter, convert_lf_to_crlf)
-
- def test_get_checkout_filter_autocrlf_input(self):
- checkout_filter = get_checkout_filter_autocrlf(b"input")
-
- self.assertEqual(checkout_filter, None)
-
-
-class NormalizeBlobTestCase(TestCase):
- def test_normalize_to_lf_no_op(self):
- base_content = b"line1\nline2"
- base_sha = "f8be7bb828880727816015d21abcbc37d033f233"
-
- base_blob = Blob()
- base_blob.set_raw_string(base_content)
-
- self.assertEqual(base_blob.as_raw_chunks(), [base_content])
- self.assertEqual(base_blob.sha().hexdigest(), base_sha)
-
- filtered_blob = normalize_blob(
- base_blob, convert_crlf_to_lf, binary_detection=False
- )
-
- self.assertEqual(filtered_blob.as_raw_chunks(), [base_content])
- self.assertEqual(filtered_blob.sha().hexdigest(), base_sha)
-
- def test_normalize_to_lf(self):
- base_content = b"line1\r\nline2"
- base_sha = "3a1bd7a52799fe5cf6411f1d35f4c10bacb1db96"
-
- base_blob = Blob()
- base_blob.set_raw_string(base_content)
-
- self.assertEqual(base_blob.as_raw_chunks(), [base_content])
- self.assertEqual(base_blob.sha().hexdigest(), base_sha)
-
- filtered_blob = normalize_blob(
- base_blob, convert_crlf_to_lf, binary_detection=False
- )
-
- normalized_content = b"line1\nline2"
- normalized_sha = "f8be7bb828880727816015d21abcbc37d033f233"
-
- self.assertEqual(filtered_blob.as_raw_chunks(), [normalized_content])
- self.assertEqual(filtered_blob.sha().hexdigest(), normalized_sha)
-
- def test_normalize_to_lf_binary(self):
- base_content = b"line1\r\nline2\0"
- base_sha = "b44504193b765f7cd79673812de8afb55b372ab2"
-
- base_blob = Blob()
- base_blob.set_raw_string(base_content)
-
- self.assertEqual(base_blob.as_raw_chunks(), [base_content])
- self.assertEqual(base_blob.sha().hexdigest(), base_sha)
-
- filtered_blob = normalize_blob(
- base_blob, convert_crlf_to_lf, binary_detection=True
- )
-
- self.assertEqual(filtered_blob.as_raw_chunks(), [base_content])
- self.assertEqual(filtered_blob.sha().hexdigest(), base_sha)
-
- def test_normalize_to_crlf_no_op(self):
- base_content = b"line1\r\nline2"
- base_sha = "3a1bd7a52799fe5cf6411f1d35f4c10bacb1db96"
-
- base_blob = Blob()
- base_blob.set_raw_string(base_content)
-
- self.assertEqual(base_blob.as_raw_chunks(), [base_content])
- self.assertEqual(base_blob.sha().hexdigest(), base_sha)
-
- filtered_blob = normalize_blob(
- base_blob, convert_lf_to_crlf, binary_detection=False
- )
-
- self.assertEqual(filtered_blob.as_raw_chunks(), [base_content])
- self.assertEqual(filtered_blob.sha().hexdigest(), base_sha)
-
- def test_normalize_to_crlf(self):
- base_content = b"line1\nline2"
- base_sha = "f8be7bb828880727816015d21abcbc37d033f233"
-
- base_blob = Blob()
- base_blob.set_raw_string(base_content)
-
- self.assertEqual(base_blob.as_raw_chunks(), [base_content])
- self.assertEqual(base_blob.sha().hexdigest(), base_sha)
-
- filtered_blob = normalize_blob(
- base_blob, convert_lf_to_crlf, binary_detection=False
- )
-
- normalized_content = b"line1\r\nline2"
- normalized_sha = "3a1bd7a52799fe5cf6411f1d35f4c10bacb1db96"
-
- self.assertEqual(filtered_blob.as_raw_chunks(), [normalized_content])
- self.assertEqual(filtered_blob.sha().hexdigest(), normalized_sha)
-
- def test_normalize_to_crlf_binary(self):
- base_content = b"line1\r\nline2\0"
- base_sha = "b44504193b765f7cd79673812de8afb55b372ab2"
-
- base_blob = Blob()
- base_blob.set_raw_string(base_content)
-
- self.assertEqual(base_blob.as_raw_chunks(), [base_content])
- self.assertEqual(base_blob.sha().hexdigest(), base_sha)
-
- filtered_blob = normalize_blob(
- base_blob, convert_lf_to_crlf, binary_detection=True
- )
-
- self.assertEqual(filtered_blob.as_raw_chunks(), [base_content])
- self.assertEqual(filtered_blob.sha().hexdigest(), base_sha)
blob - d15b15d5073b17e99ee334f60d6feaa19206c98d (mode 644)
blob + /dev/null
--- dulwich/tests/test_lru_cache.py
+++ /dev/null
-# Copyright (C) 2006, 2008 Canonical Ltd
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for the lru_cache module."""
-
-from dulwich import lru_cache
-from dulwich.tests import TestCase
-
-
-class TestLRUCache(TestCase):
- """Test that LRU cache properly keeps track of entries."""
-
- def test_cache_size(self):
- cache = lru_cache.LRUCache(max_cache=10)
- self.assertEqual(10, cache.cache_size())
-
- cache = lru_cache.LRUCache(max_cache=256)
- self.assertEqual(256, cache.cache_size())
-
- cache.resize(512)
- self.assertEqual(512, cache.cache_size())
-
- def test_missing(self):
- cache = lru_cache.LRUCache(max_cache=10)
-
- self.assertNotIn("foo", cache)
- self.assertRaises(KeyError, cache.__getitem__, "foo")
-
- cache["foo"] = "bar"
- self.assertEqual("bar", cache["foo"])
- self.assertIn("foo", cache)
- self.assertNotIn("bar", cache)
-
- def test_map_None(self):
- # Make sure that we can properly map None as a key.
- cache = lru_cache.LRUCache(max_cache=10)
- self.assertNotIn(None, cache)
- cache[None] = 1
- self.assertEqual(1, cache[None])
- cache[None] = 2
- self.assertEqual(2, cache[None])
- # Test the various code paths of __getitem__, to make sure that we can
- # handle when None is the key for the LRU and the MRU
- cache[1] = 3
- cache[None] = 1
- cache[None]
- cache[1]
- cache[None]
- self.assertEqual([None, 1], [n.key for n in cache._walk_lru()])
-
- def test_add__null_key(self):
- cache = lru_cache.LRUCache(max_cache=10)
- self.assertRaises(ValueError, cache.add, lru_cache._null_key, 1)
-
- def test_overflow(self):
- """Adding extra entries will pop out old ones."""
- cache = lru_cache.LRUCache(max_cache=1, after_cleanup_count=1)
-
- cache["foo"] = "bar"
- # With a max cache of 1, adding 'baz' should pop out 'foo'
- cache["baz"] = "biz"
-
- self.assertNotIn("foo", cache)
- self.assertIn("baz", cache)
-
- self.assertEqual("biz", cache["baz"])
-
- def test_by_usage(self):
- """Accessing entries bumps them up in priority."""
- cache = lru_cache.LRUCache(max_cache=2)
-
- cache["baz"] = "biz"
- cache["foo"] = "bar"
-
- self.assertEqual("biz", cache["baz"])
-
- # This must kick out 'foo' because it was the last accessed
- cache["nub"] = "in"
-
- self.assertNotIn("foo", cache)
-
- def test_cleanup(self):
- """Test that we can use a cleanup function."""
- cleanup_called = []
-
- def cleanup_func(key, val):
- cleanup_called.append((key, val))
-
- cache = lru_cache.LRUCache(max_cache=2, after_cleanup_count=2)
-
- cache.add("baz", "1", cleanup=cleanup_func)
- cache.add("foo", "2", cleanup=cleanup_func)
- cache.add("biz", "3", cleanup=cleanup_func)
-
- self.assertEqual([("baz", "1")], cleanup_called)
-
- # 'foo' is now most recent, so final cleanup will call it last
- cache["foo"]
- cache.clear()
- self.assertEqual([("baz", "1"), ("biz", "3"), ("foo", "2")], cleanup_called)
-
- def test_cleanup_on_replace(self):
- """Replacing an object should cleanup the old value."""
- cleanup_called = []
-
- def cleanup_func(key, val):
- cleanup_called.append((key, val))
-
- cache = lru_cache.LRUCache(max_cache=2)
- cache.add(1, 10, cleanup=cleanup_func)
- cache.add(2, 20, cleanup=cleanup_func)
- cache.add(2, 25, cleanup=cleanup_func)
-
- self.assertEqual([(2, 20)], cleanup_called)
- self.assertEqual(25, cache[2])
-
- # Even __setitem__ should make sure cleanup() is called
- cache[2] = 26
- self.assertEqual([(2, 20), (2, 25)], cleanup_called)
-
- def test_len(self):
- cache = lru_cache.LRUCache(max_cache=10, after_cleanup_count=10)
-
- cache[1] = 10
- cache[2] = 20
- cache[3] = 30
- cache[4] = 40
-
- self.assertEqual(4, len(cache))
-
- cache[5] = 50
- cache[6] = 60
- cache[7] = 70
- cache[8] = 80
-
- self.assertEqual(8, len(cache))
-
- cache[1] = 15 # replacement
-
- self.assertEqual(8, len(cache))
-
- cache[9] = 90
- cache[10] = 100
- cache[11] = 110
-
- # We hit the max
- self.assertEqual(10, len(cache))
- self.assertEqual(
- [11, 10, 9, 1, 8, 7, 6, 5, 4, 3],
- [n.key for n in cache._walk_lru()],
- )
-
- def test_cleanup_shrinks_to_after_clean_count(self):
- cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=3)
-
- cache.add(1, 10)
- cache.add(2, 20)
- cache.add(3, 25)
- cache.add(4, 30)
- cache.add(5, 35)
-
- self.assertEqual(5, len(cache))
- # This will bump us over the max, which causes us to shrink down to
- # after_cleanup_cache size
- cache.add(6, 40)
- self.assertEqual(3, len(cache))
-
- def test_after_cleanup_larger_than_max(self):
- cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=10)
- self.assertEqual(5, cache._after_cleanup_count)
-
- def test_after_cleanup_none(self):
- cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=None)
- # By default _after_cleanup_size is 80% of the normal size
- self.assertEqual(4, cache._after_cleanup_count)
-
- def test_cleanup_2(self):
- cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=2)
-
- # Add these in order
- cache.add(1, 10)
- cache.add(2, 20)
- cache.add(3, 25)
- cache.add(4, 30)
- cache.add(5, 35)
-
- self.assertEqual(5, len(cache))
- # Force a compaction
- cache.cleanup()
- self.assertEqual(2, len(cache))
-
- def test_preserve_last_access_order(self):
- cache = lru_cache.LRUCache(max_cache=5)
-
- # Add these in order
- cache.add(1, 10)
- cache.add(2, 20)
- cache.add(3, 25)
- cache.add(4, 30)
- cache.add(5, 35)
-
- self.assertEqual([5, 4, 3, 2, 1], [n.key for n in cache._walk_lru()])
-
- # Now access some randomly
- cache[2]
- cache[5]
- cache[3]
- cache[2]
- self.assertEqual([2, 3, 5, 4, 1], [n.key for n in cache._walk_lru()])
-
- def test_get(self):
- cache = lru_cache.LRUCache(max_cache=5)
-
- cache.add(1, 10)
- cache.add(2, 20)
- self.assertEqual(20, cache.get(2))
- self.assertEqual(None, cache.get(3))
- obj = object()
- self.assertIs(obj, cache.get(3, obj))
- self.assertEqual([2, 1], [n.key for n in cache._walk_lru()])
- self.assertEqual(10, cache.get(1))
- self.assertEqual([1, 2], [n.key for n in cache._walk_lru()])
-
- def test_keys(self):
- cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=5)
-
- cache[1] = 2
- cache[2] = 3
- cache[3] = 4
- self.assertEqual([1, 2, 3], sorted(cache.keys()))
- cache[4] = 5
- cache[5] = 6
- cache[6] = 7
- self.assertEqual([2, 3, 4, 5, 6], sorted(cache.keys()))
-
- def test_resize_smaller(self):
- cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=4)
- cache[1] = 2
- cache[2] = 3
- cache[3] = 4
- cache[4] = 5
- cache[5] = 6
- self.assertEqual([1, 2, 3, 4, 5], sorted(cache.keys()))
- cache[6] = 7
- self.assertEqual([3, 4, 5, 6], sorted(cache.keys()))
- # Now resize to something smaller, which triggers a cleanup
- cache.resize(max_cache=3, after_cleanup_count=2)
- self.assertEqual([5, 6], sorted(cache.keys()))
- # Adding something will use the new size
- cache[7] = 8
- self.assertEqual([5, 6, 7], sorted(cache.keys()))
- cache[8] = 9
- self.assertEqual([7, 8], sorted(cache.keys()))
-
- def test_resize_larger(self):
- cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=4)
- cache[1] = 2
- cache[2] = 3
- cache[3] = 4
- cache[4] = 5
- cache[5] = 6
- self.assertEqual([1, 2, 3, 4, 5], sorted(cache.keys()))
- cache[6] = 7
- self.assertEqual([3, 4, 5, 6], sorted(cache.keys()))
- cache.resize(max_cache=8, after_cleanup_count=6)
- self.assertEqual([3, 4, 5, 6], sorted(cache.keys()))
- cache[7] = 8
- cache[8] = 9
- cache[9] = 10
- cache[10] = 11
- self.assertEqual([3, 4, 5, 6, 7, 8, 9, 10], sorted(cache.keys()))
- cache[11] = 12 # triggers cleanup back to new after_cleanup_count
- self.assertEqual([6, 7, 8, 9, 10, 11], sorted(cache.keys()))
-
-
-class TestLRUSizeCache(TestCase):
- def test_basic_init(self):
- cache = lru_cache.LRUSizeCache()
- self.assertEqual(2048, cache._max_cache)
- self.assertEqual(int(cache._max_size * 0.8), cache._after_cleanup_size)
- self.assertEqual(0, cache._value_size)
-
- def test_add__null_key(self):
- cache = lru_cache.LRUSizeCache()
- self.assertRaises(ValueError, cache.add, lru_cache._null_key, 1)
-
- def test_add_tracks_size(self):
- cache = lru_cache.LRUSizeCache()
- self.assertEqual(0, cache._value_size)
- cache.add("my key", "my value text")
- self.assertEqual(13, cache._value_size)
-
- def test_remove_tracks_size(self):
- cache = lru_cache.LRUSizeCache()
- self.assertEqual(0, cache._value_size)
- cache.add("my key", "my value text")
- self.assertEqual(13, cache._value_size)
- node = cache._cache["my key"]
- cache._remove_node(node)
- self.assertEqual(0, cache._value_size)
-
- def test_no_add_over_size(self):
- """Adding a large value may not be cached at all."""
- cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5)
- self.assertEqual(0, cache._value_size)
- self.assertEqual({}, cache.items())
- cache.add("test", "key")
- self.assertEqual(3, cache._value_size)
- self.assertEqual({"test": "key"}, cache.items())
- cache.add("test2", "key that is too big")
- self.assertEqual(3, cache._value_size)
- self.assertEqual({"test": "key"}, cache.items())
- # If we would add a key, only to cleanup and remove all cached entries,
- # then obviously that value should not be stored
- cache.add("test3", "bigkey")
- self.assertEqual(3, cache._value_size)
- self.assertEqual({"test": "key"}, cache.items())
-
- cache.add("test4", "bikey")
- self.assertEqual(3, cache._value_size)
- self.assertEqual({"test": "key"}, cache.items())
-
- def test_no_add_over_size_cleanup(self):
- """If a large value is not cached, we will call cleanup right away."""
- cleanup_calls = []
-
- def cleanup(key, value):
- cleanup_calls.append((key, value))
-
- cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5)
- self.assertEqual(0, cache._value_size)
- self.assertEqual({}, cache.items())
- cache.add("test", "key that is too big", cleanup=cleanup)
- # key was not added
- self.assertEqual(0, cache._value_size)
- self.assertEqual({}, cache.items())
- # and cleanup was called
- self.assertEqual([("test", "key that is too big")], cleanup_calls)
-
- def test_adding_clears_cache_based_on_size(self):
- """The cache is cleared in LRU order until small enough."""
- cache = lru_cache.LRUSizeCache(max_size=20)
- cache.add("key1", "value") # 5 chars
- cache.add("key2", "value2") # 6 chars
- cache.add("key3", "value23") # 7 chars
- self.assertEqual(5 + 6 + 7, cache._value_size)
- cache["key2"] # reference key2 so it gets a newer reference time
- cache.add("key4", "value234") # 8 chars, over limit
- # We have to remove 2 keys to get back under limit
- self.assertEqual(6 + 8, cache._value_size)
- self.assertEqual({"key2": "value2", "key4": "value234"}, cache.items())
-
- def test_adding_clears_to_after_cleanup_size(self):
- cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10)
- cache.add("key1", "value") # 5 chars
- cache.add("key2", "value2") # 6 chars
- cache.add("key3", "value23") # 7 chars
- self.assertEqual(5 + 6 + 7, cache._value_size)
- cache["key2"] # reference key2 so it gets a newer reference time
- cache.add("key4", "value234") # 8 chars, over limit
- # We have to remove 3 keys to get back under limit
- self.assertEqual(8, cache._value_size)
- self.assertEqual({"key4": "value234"}, cache.items())
-
- def test_custom_sizes(self):
- def size_of_list(lst):
- return sum(len(x) for x in lst)
-
- cache = lru_cache.LRUSizeCache(
- max_size=20, after_cleanup_size=10, compute_size=size_of_list
- )
-
- cache.add("key1", ["val", "ue"]) # 5 chars
- cache.add("key2", ["val", "ue2"]) # 6 chars
- cache.add("key3", ["val", "ue23"]) # 7 chars
- self.assertEqual(5 + 6 + 7, cache._value_size)
- cache["key2"] # reference key2 so it gets a newer reference time
- cache.add("key4", ["value", "234"]) # 8 chars, over limit
- # We have to remove 3 keys to get back under limit
- self.assertEqual(8, cache._value_size)
- self.assertEqual({"key4": ["value", "234"]}, cache.items())
-
- def test_cleanup(self):
- cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10)
-
- # Add these in order
- cache.add("key1", "value") # 5 chars
- cache.add("key2", "value2") # 6 chars
- cache.add("key3", "value23") # 7 chars
- self.assertEqual(5 + 6 + 7, cache._value_size)
-
- cache.cleanup()
- # Only the most recent fits after cleaning up
- self.assertEqual(7, cache._value_size)
-
- def test_keys(self):
- cache = lru_cache.LRUSizeCache(max_size=10)
-
- cache[1] = "a"
- cache[2] = "b"
- cache[3] = "cdef"
- self.assertEqual([1, 2, 3], sorted(cache.keys()))
-
- def test_resize_smaller(self):
- cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=9)
- cache[1] = "abc"
- cache[2] = "def"
- cache[3] = "ghi"
- cache[4] = "jkl"
- # Triggers a cleanup
- self.assertEqual([2, 3, 4], sorted(cache.keys()))
- # Resize should also cleanup again
- cache.resize(max_size=6, after_cleanup_size=4)
- self.assertEqual([4], sorted(cache.keys()))
- # Adding should use the new max size
- cache[5] = "mno"
- self.assertEqual([4, 5], sorted(cache.keys()))
- cache[6] = "pqr"
- self.assertEqual([6], sorted(cache.keys()))
-
- def test_resize_larger(self):
- cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=9)
- cache[1] = "abc"
- cache[2] = "def"
- cache[3] = "ghi"
- cache[4] = "jkl"
- # Triggers a cleanup
- self.assertEqual([2, 3, 4], sorted(cache.keys()))
- cache.resize(max_size=15, after_cleanup_size=12)
- self.assertEqual([2, 3, 4], sorted(cache.keys()))
- cache[5] = "mno"
- cache[6] = "pqr"
- self.assertEqual([2, 3, 4, 5, 6], sorted(cache.keys()))
- cache[7] = "stu"
- self.assertEqual([4, 5, 6, 7], sorted(cache.keys()))
blob - bbb392f16fb45ccd75df163c86cc41091eb76b68 (mode 644)
blob + /dev/null
--- dulwich/tests/test_mailmap.py
+++ /dev/null
-# test_mailmap.py -- Tests for dulwich.mailmap
-# Copyright (C) 2018 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for dulwich.mailmap."""
-
-from io import BytesIO
-from unittest import TestCase
-
-from ..mailmap import Mailmap, read_mailmap
-
-
-class ReadMailmapTests(TestCase):
- def test_read(self):
- b = BytesIO(
- b"""\
-Jane Doe <jane@desktop.(none)>
-Joe R. Developer <joe@example.com>
-# A comment
-<cto@company.xx> <cto@coompany.xx> # Comment
-Some Dude <some@dude.xx> nick1 <bugs@company.xx>
-Other Author <other@author.xx> nick2 <bugs@company.xx>
-Other Author <other@author.xx> <nick2@company.xx>
-Santa Claus <santa.claus@northpole.xx> <me@company.xx>
-"""
- )
- self.assertEqual(
- [
- ((b"Jane Doe", b"jane@desktop.(none)"), None),
- ((b"Joe R. Developer", b"joe@example.com"), None),
- ((None, b"cto@company.xx"), (None, b"cto@coompany.xx")),
- (
- (b"Some Dude", b"some@dude.xx"),
- (b"nick1", b"bugs@company.xx"),
- ),
- (
- (b"Other Author", b"other@author.xx"),
- (b"nick2", b"bugs@company.xx"),
- ),
- (
- (b"Other Author", b"other@author.xx"),
- (None, b"nick2@company.xx"),
- ),
- (
- (b"Santa Claus", b"santa.claus@northpole.xx"),
- (None, b"me@company.xx"),
- ),
- ],
- list(read_mailmap(b)),
- )
-
-
-class MailmapTests(TestCase):
- def test_lookup(self):
- m = Mailmap()
- m.add_entry((b"Jane Doe", b"jane@desktop.(none)"), (None, None))
- m.add_entry((b"Joe R. Developer", b"joe@example.com"), None)
- m.add_entry((None, b"cto@company.xx"), (None, b"cto@coompany.xx"))
- m.add_entry((b"Some Dude", b"some@dude.xx"), (b"nick1", b"bugs@company.xx"))
- m.add_entry(
- (b"Other Author", b"other@author.xx"),
- (b"nick2", b"bugs@company.xx"),
- )
- m.add_entry((b"Other Author", b"other@author.xx"), (None, b"nick2@company.xx"))
- m.add_entry(
- (b"Santa Claus", b"santa.claus@northpole.xx"),
- (None, b"me@company.xx"),
- )
- self.assertEqual(
- b"Jane Doe <jane@desktop.(none)>",
- m.lookup(b"Jane Doe <jane@desktop.(none)>"),
- )
- self.assertEqual(
- b"Jane Doe <jane@desktop.(none)>",
- m.lookup(b"Jane Doe <jane@example.com>"),
- )
- self.assertEqual(
- b"Jane Doe <jane@desktop.(none)>",
- m.lookup(b"Jane D. <jane@desktop.(none)>"),
- )
- self.assertEqual(
- b"Some Dude <some@dude.xx>", m.lookup(b"nick1 <bugs@company.xx>")
- )
- self.assertEqual(b"CTO <cto@company.xx>", m.lookup(b"CTO <cto@coompany.xx>"))
blob - f11b205b14eb90d4c9e1673760281ea09a9c7089 (mode 644)
blob + /dev/null
--- dulwich/tests/test_missing_obj_finder.py
+++ /dev/null
-# test_missing_obj_finder.py -- tests for MissingObjectFinder
-# Copyright (C) 2012 syntevo GmbH
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-from dulwich.tests import TestCase
-
-from ..object_store import MemoryObjectStore, MissingObjectFinder
-from ..objects import Blob
-from .utils import build_commit_graph, make_object, make_tag
-
-
-class MissingObjectFinderTest(TestCase):
- def setUp(self):
- super().setUp()
- self.store = MemoryObjectStore()
- self.commits = []
-
- def cmt(self, n):
- return self.commits[n - 1]
-
- def assertMissingMatch(self, haves, wants, expected):
- for sha, path in MissingObjectFinder(self.store, haves, wants, shallow=set()):
- self.assertIn(
- sha, expected, f"({sha},{path}) erroneously reported as missing"
- )
- expected.remove(sha)
-
- self.assertEqual(
- len(expected),
- 0,
- f"some objects are not reported as missing: {expected}",
- )
-
-
-class MOFLinearRepoTest(MissingObjectFinderTest):
- def setUp(self):
- super().setUp()
- # present in 1, removed in 3
- f1_1 = make_object(Blob, data=b"f1")
- # present in all revisions, changed in 2 and 3
- f2_1 = make_object(Blob, data=b"f2")
- f2_2 = make_object(Blob, data=b"f2-changed")
- f2_3 = make_object(Blob, data=b"f2-changed-again")
- # added in 2, left unmodified in 3
- f3_2 = make_object(Blob, data=b"f3")
-
- commit_spec = [[1], [2, 1], [3, 2]]
- trees = {
- 1: [(b"f1", f1_1), (b"f2", f2_1)],
- 2: [(b"f1", f1_1), (b"f2", f2_2), (b"f3", f3_2)],
- 3: [(b"f2", f2_3), (b"f3", f3_2)],
- }
- # commit 1: f1 and f2
- # commit 2: f3 added, f2 changed. Missing shall report commit id and a
- # tree referenced by commit
- # commit 3: f1 removed, f2 changed. Commit sha and root tree sha shall
- # be reported as modified
- self.commits = build_commit_graph(self.store, commit_spec, trees)
- self.missing_1_2 = [self.cmt(2).id, self.cmt(2).tree, f2_2.id, f3_2.id]
- self.missing_2_3 = [self.cmt(3).id, self.cmt(3).tree, f2_3.id]
- self.missing_1_3 = [
- self.cmt(2).id,
- self.cmt(3).id,
- self.cmt(2).tree,
- self.cmt(3).tree,
- f2_2.id,
- f3_2.id,
- f2_3.id,
- ]
-
- def test_1_to_2(self):
- self.assertMissingMatch([self.cmt(1).id], [self.cmt(2).id], self.missing_1_2)
-
- def test_2_to_3(self):
- self.assertMissingMatch([self.cmt(2).id], [self.cmt(3).id], self.missing_2_3)
-
- def test_1_to_3(self):
- self.assertMissingMatch([self.cmt(1).id], [self.cmt(3).id], self.missing_1_3)
-
- def test_bogus_haves(self):
- """Ensure non-existent SHA in haves are tolerated."""
- bogus_sha = self.cmt(2).id[::-1]
- haves = [self.cmt(1).id, bogus_sha]
- wants = [self.cmt(3).id]
- self.assertMissingMatch(haves, wants, self.missing_1_3)
-
- def test_bogus_wants_failure(self):
- """Ensure non-existent SHA in wants are not tolerated."""
- bogus_sha = self.cmt(2).id[::-1]
- haves = [self.cmt(1).id]
- wants = [self.cmt(3).id, bogus_sha]
- self.assertRaises(
- KeyError, MissingObjectFinder, self.store, haves, wants, shallow=set()
- )
-
- def test_no_changes(self):
- self.assertMissingMatch([self.cmt(3).id], [self.cmt(3).id], [])
-
-
-class MOFMergeForkRepoTest(MissingObjectFinderTest):
- # 1 --- 2 --- 4 --- 6 --- 7
- # \ /
- # 3 ---
- # \
- # 5
-
- def setUp(self):
- super().setUp()
- f1_1 = make_object(Blob, data=b"f1")
- f1_2 = make_object(Blob, data=b"f1-2")
- f1_4 = make_object(Blob, data=b"f1-4")
- f1_7 = make_object(Blob, data=b"f1-2") # same data as in rev 2
- f2_1 = make_object(Blob, data=b"f2")
- f2_3 = make_object(Blob, data=b"f2-3")
- f3_3 = make_object(Blob, data=b"f3")
- f3_5 = make_object(Blob, data=b"f3-5")
- commit_spec = [[1], [2, 1], [3, 2], [4, 2], [5, 3], [6, 3, 4], [7, 6]]
- trees = {
- 1: [(b"f1", f1_1), (b"f2", f2_1)],
- 2: [(b"f1", f1_2), (b"f2", f2_1)], # f1 changed
- # f3 added, f2 changed
- 3: [(b"f1", f1_2), (b"f2", f2_3), (b"f3", f3_3)],
- 4: [(b"f1", f1_4), (b"f2", f2_1)], # f1 changed
- 5: [(b"f1", f1_2), (b"f3", f3_5)], # f2 removed, f3 changed
- # merged 3 and 4
- 6: [(b"f1", f1_4), (b"f2", f2_3), (b"f3", f3_3)],
- # f1 changed to match rev2. f3 removed
- 7: [(b"f1", f1_7), (b"f2", f2_3)],
- }
- self.commits = build_commit_graph(self.store, commit_spec, trees)
-
- self.f1_2_id = f1_2.id
- self.f1_4_id = f1_4.id
- self.f1_7_id = f1_7.id
- self.f2_3_id = f2_3.id
- self.f3_3_id = f3_3.id
-
- self.assertEqual(f1_2.id, f1_7.id, "[sanity]")
-
- def test_have6_want7(self):
- # have 6, want 7. Ideally, shall not report f1_7 as it's the same as
- # f1_2, however, to do so, MissingObjectFinder shall not record trees
- # of common commits only, but also all parent trees and tree items,
- # which is an overkill (i.e. in sha_done it records f1_4 as known, and
- # doesn't record f1_2 was known prior to that, hence can't detect f1_7
- # is in fact f1_2 and shall not be reported)
- self.assertMissingMatch(
- [self.cmt(6).id],
- [self.cmt(7).id],
- [self.cmt(7).id, self.cmt(7).tree, self.f1_7_id],
- )
-
- def test_have4_want7(self):
- # have 4, want 7. Shall not include rev5 as it is not in the tree
- # between 4 and 7 (well, it is, but its SHA's are irrelevant for 4..7
- # commit hierarchy)
- self.assertMissingMatch(
- [self.cmt(4).id],
- [self.cmt(7).id],
- [
- self.cmt(7).id,
- self.cmt(6).id,
- self.cmt(3).id,
- self.cmt(7).tree,
- self.cmt(6).tree,
- self.cmt(3).tree,
- self.f2_3_id,
- self.f3_3_id,
- ],
- )
-
- def test_have1_want6(self):
- # have 1, want 6. Shall not include rev5
- self.assertMissingMatch(
- [self.cmt(1).id],
- [self.cmt(6).id],
- [
- self.cmt(6).id,
- self.cmt(4).id,
- self.cmt(3).id,
- self.cmt(2).id,
- self.cmt(6).tree,
- self.cmt(4).tree,
- self.cmt(3).tree,
- self.cmt(2).tree,
- self.f1_2_id,
- self.f1_4_id,
- self.f2_3_id,
- self.f3_3_id,
- ],
- )
-
- def test_have3_want6(self):
- # have 3, want 7. Shall not report rev2 and its tree, because
- # haves(3) means has parents, i.e. rev2, too
- # BUT shall report any changes descending rev2 (excluding rev3)
- # Shall NOT report f1_7 as it's technically == f1_2
- self.assertMissingMatch(
- [self.cmt(3).id],
- [self.cmt(7).id],
- [
- self.cmt(7).id,
- self.cmt(6).id,
- self.cmt(4).id,
- self.cmt(7).tree,
- self.cmt(6).tree,
- self.cmt(4).tree,
- self.f1_4_id,
- ],
- )
-
- def test_have5_want7(self):
- # have 5, want 7. Common parent is rev2, hence children of rev2 from
- # a descent line other than rev5 shall be reported
- # expects f1_4 from rev6. f3_5 is known in rev5;
- # f1_7 shall be the same as f1_2 (known, too)
- self.assertMissingMatch(
- [self.cmt(5).id],
- [self.cmt(7).id],
- [
- self.cmt(7).id,
- self.cmt(6).id,
- self.cmt(4).id,
- self.cmt(7).tree,
- self.cmt(6).tree,
- self.cmt(4).tree,
- self.f1_4_id,
- ],
- )
-
-
-class MOFTagsTest(MissingObjectFinderTest):
- def setUp(self):
- super().setUp()
- f1_1 = make_object(Blob, data=b"f1")
- commit_spec = [[1]]
- trees = {1: [(b"f1", f1_1)]}
- self.commits = build_commit_graph(self.store, commit_spec, trees)
-
- self._normal_tag = make_tag(self.cmt(1))
- self.store.add_object(self._normal_tag)
-
- self._tag_of_tag = make_tag(self._normal_tag)
- self.store.add_object(self._tag_of_tag)
-
- self._tag_of_tree = make_tag(self.store[self.cmt(1).tree])
- self.store.add_object(self._tag_of_tree)
-
- self._tag_of_blob = make_tag(f1_1)
- self.store.add_object(self._tag_of_blob)
-
- self._tag_of_tag_of_blob = make_tag(self._tag_of_blob)
- self.store.add_object(self._tag_of_tag_of_blob)
-
- self.f1_1_id = f1_1.id
-
- def test_tagged_commit(self):
- # The user already has the tagged commit, all they want is the tag,
- # so send them only the tag object.
- self.assertMissingMatch(
- [self.cmt(1).id], [self._normal_tag.id], [self._normal_tag.id]
- )
-
- # The remaining cases are unusual, but do happen in the wild.
- def test_tagged_tag(self):
- # User already has tagged tag, send only tag of tag
- self.assertMissingMatch(
- [self._normal_tag.id], [self._tag_of_tag.id], [self._tag_of_tag.id]
- )
- # User needs both tags, but already has commit
- self.assertMissingMatch(
- [self.cmt(1).id],
- [self._tag_of_tag.id],
- [self._normal_tag.id, self._tag_of_tag.id],
- )
-
- def test_tagged_tree(self):
- self.assertMissingMatch(
- [],
- [self._tag_of_tree.id],
- [self._tag_of_tree.id, self.cmt(1).tree, self.f1_1_id],
- )
-
- def test_tagged_blob(self):
- self.assertMissingMatch(
- [], [self._tag_of_blob.id], [self._tag_of_blob.id, self.f1_1_id]
- )
-
- def test_tagged_tagged_blob(self):
- self.assertMissingMatch(
- [],
- [self._tag_of_tag_of_blob.id],
- [self._tag_of_tag_of_blob.id, self._tag_of_blob.id, self.f1_1_id],
- )
blob - fc026082fa4e723ecf8556b4441fc8e45cc3597f (mode 644)
blob + /dev/null
--- dulwich/tests/test_object_store.py
+++ /dev/null
-# test_object_store.py -- tests for object_store.py
-# Copyright (C) 2008 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for the object store interface."""
-
-import os
-import shutil
-import stat
-import sys
-import tempfile
-from contextlib import closing
-from io import BytesIO
-from unittest import skipUnless
-
-from dulwich.tests import TestCase
-
-from ..errors import NotTreeError
-from ..index import commit_tree
-from ..object_store import (
- DiskObjectStore,
- MemoryObjectStore,
- ObjectStoreGraphWalker,
- OverlayObjectStore,
- commit_tree_changes,
- iter_tree_contents,
- peel_sha,
- read_packs_file,
- tree_lookup_path,
-)
-from ..objects import (
- S_IFGITLINK,
- Blob,
- EmptyFileException,
- SubmoduleEncountered,
- Tree,
- TreeEntry,
- sha_to_hex,
-)
-from ..pack import REF_DELTA, write_pack_objects
-from ..protocol import DEPTH_INFINITE
-from .utils import build_pack, make_object, make_tag
-
-try:
- from unittest.mock import patch
-except ImportError:
- patch = None # type: ignore
-
-
-testobject = make_object(Blob, data=b"yummy data")
-
-
-class ObjectStoreTests:
- def test_determine_wants_all(self):
- self.assertEqual(
- [b"1" * 40],
- self.store.determine_wants_all({b"refs/heads/foo": b"1" * 40}),
- )
-
- def test_determine_wants_all_zero(self):
- self.assertEqual(
- [], self.store.determine_wants_all({b"refs/heads/foo": b"0" * 40})
- )
-
- @skipUnless(patch, "Required mock.patch")
- def test_determine_wants_all_depth(self):
- self.store.add_object(testobject)
- refs = {b"refs/heads/foo": testobject.id}
- with patch.object(self.store, "_get_depth", return_value=1) as m:
- self.assertEqual([], self.store.determine_wants_all(refs, depth=0))
- self.assertEqual(
- [testobject.id],
- self.store.determine_wants_all(refs, depth=DEPTH_INFINITE),
- )
- m.assert_not_called()
-
- self.assertEqual([], self.store.determine_wants_all(refs, depth=1))
- m.assert_called_with(testobject.id)
- self.assertEqual(
- [testobject.id], self.store.determine_wants_all(refs, depth=2)
- )
-
- def test_get_depth(self):
- self.assertEqual(0, self.store._get_depth(testobject.id))
-
- self.store.add_object(testobject)
- self.assertEqual(
- 1, self.store._get_depth(testobject.id, get_parents=lambda x: [])
- )
-
- parent = make_object(Blob, data=b"parent data")
- self.store.add_object(parent)
- self.assertEqual(
- 2,
- self.store._get_depth(
- testobject.id,
- get_parents=lambda x: [parent.id] if x == testobject else [],
- ),
- )
-
- def test_iter(self):
- self.assertEqual([], list(self.store))
-
- def test_get_nonexistant(self):
- self.assertRaises(KeyError, lambda: self.store[b"a" * 40])
-
- def test_contains_nonexistant(self):
- self.assertNotIn(b"a" * 40, self.store)
-
- def test_add_objects_empty(self):
- self.store.add_objects([])
-
- def test_add_commit(self):
- # TODO: Argh, no way to construct Git commit objects without
- # access to a serialized form.
- self.store.add_objects([])
-
- def test_store_resilience(self):
- """Test if updating an existing stored object doesn't erase the
- object from the store.
- """
- test_object = make_object(Blob, data=b"data")
-
- self.store.add_object(test_object)
- test_object_id = test_object.id
- test_object.data = test_object.data + b"update"
- stored_test_object = self.store[test_object_id]
-
- self.assertNotEqual(test_object.id, stored_test_object.id)
- self.assertEqual(stored_test_object.id, test_object_id)
-
- def test_add_object(self):
- self.store.add_object(testobject)
- self.assertEqual({testobject.id}, set(self.store))
- self.assertIn(testobject.id, self.store)
- r = self.store[testobject.id]
- self.assertEqual(r, testobject)
-
- def test_add_objects(self):
- data = [(testobject, "mypath")]
- self.store.add_objects(data)
- self.assertEqual({testobject.id}, set(self.store))
- self.assertIn(testobject.id, self.store)
- r = self.store[testobject.id]
- self.assertEqual(r, testobject)
-
- def test_tree_changes(self):
- blob_a1 = make_object(Blob, data=b"a1")
- blob_a2 = make_object(Blob, data=b"a2")
- blob_b = make_object(Blob, data=b"b")
- for blob in [blob_a1, blob_a2, blob_b]:
- self.store.add_object(blob)
-
- blobs_1 = [(b"a", blob_a1.id, 0o100644), (b"b", blob_b.id, 0o100644)]
- tree1_id = commit_tree(self.store, blobs_1)
- blobs_2 = [(b"a", blob_a2.id, 0o100644), (b"b", blob_b.id, 0o100644)]
- tree2_id = commit_tree(self.store, blobs_2)
- change_a = (
- (b"a", b"a"),
- (0o100644, 0o100644),
- (blob_a1.id, blob_a2.id),
- )
- self.assertEqual([change_a], list(self.store.tree_changes(tree1_id, tree2_id)))
- self.assertEqual(
- [
- change_a,
- ((b"b", b"b"), (0o100644, 0o100644), (blob_b.id, blob_b.id)),
- ],
- list(self.store.tree_changes(tree1_id, tree2_id, want_unchanged=True)),
- )
-
- def test_iter_tree_contents(self):
- blob_a = make_object(Blob, data=b"a")
- blob_b = make_object(Blob, data=b"b")
- blob_c = make_object(Blob, data=b"c")
- for blob in [blob_a, blob_b, blob_c]:
- self.store.add_object(blob)
-
- blobs = [
- (b"a", blob_a.id, 0o100644),
- (b"ad/b", blob_b.id, 0o100644),
- (b"ad/bd/c", blob_c.id, 0o100755),
- (b"ad/c", blob_c.id, 0o100644),
- (b"c", blob_c.id, 0o100644),
- ]
- tree_id = commit_tree(self.store, blobs)
- self.assertEqual(
- [TreeEntry(p, m, h) for (p, h, m) in blobs],
- list(iter_tree_contents(self.store, tree_id)),
- )
- self.assertEqual([], list(iter_tree_contents(self.store, None)))
-
- def test_iter_tree_contents_include_trees(self):
- blob_a = make_object(Blob, data=b"a")
- blob_b = make_object(Blob, data=b"b")
- blob_c = make_object(Blob, data=b"c")
- for blob in [blob_a, blob_b, blob_c]:
- self.store.add_object(blob)
-
- blobs = [
- (b"a", blob_a.id, 0o100644),
- (b"ad/b", blob_b.id, 0o100644),
- (b"ad/bd/c", blob_c.id, 0o100755),
- ]
- tree_id = commit_tree(self.store, blobs)
- tree = self.store[tree_id]
- tree_ad = self.store[tree[b"ad"][1]]
- tree_bd = self.store[tree_ad[b"bd"][1]]
-
- expected = [
- TreeEntry(b"", 0o040000, tree_id),
- TreeEntry(b"a", 0o100644, blob_a.id),
- TreeEntry(b"ad", 0o040000, tree_ad.id),
- TreeEntry(b"ad/b", 0o100644, blob_b.id),
- TreeEntry(b"ad/bd", 0o040000, tree_bd.id),
- TreeEntry(b"ad/bd/c", 0o100755, blob_c.id),
- ]
- actual = iter_tree_contents(self.store, tree_id, include_trees=True)
- self.assertEqual(expected, list(actual))
-
- def make_tag(self, name, obj):
- tag = make_tag(obj, name=name)
- self.store.add_object(tag)
- return tag
-
- def test_peel_sha(self):
- self.store.add_object(testobject)
- tag1 = self.make_tag(b"1", testobject)
- tag2 = self.make_tag(b"2", testobject)
- tag3 = self.make_tag(b"3", testobject)
- for obj in [testobject, tag1, tag2, tag3]:
- self.assertEqual((obj, testobject), peel_sha(self.store, obj.id))
-
- def test_get_raw(self):
- self.store.add_object(testobject)
- self.assertEqual(
- (Blob.type_num, b"yummy data"), self.store.get_raw(testobject.id)
- )
-
- def test_close(self):
- # For now, just check that close doesn't barf.
- self.store.add_object(testobject)
- self.store.close()
-
-
-class OverlayObjectStoreTests(ObjectStoreTests, TestCase):
- def setUp(self):
- TestCase.setUp(self)
- self.bases = [MemoryObjectStore(), MemoryObjectStore()]
- self.store = OverlayObjectStore(self.bases, self.bases[0])
-
-
-class MemoryObjectStoreTests(ObjectStoreTests, TestCase):
- def setUp(self):
- TestCase.setUp(self)
- self.store = MemoryObjectStore()
-
- def test_add_pack(self):
- o = MemoryObjectStore()
- f, commit, abort = o.add_pack()
- try:
- b = make_object(Blob, data=b"more yummy data")
- write_pack_objects(f.write, [(b, None)])
- except BaseException:
- abort()
- raise
- else:
- commit()
-
- def test_add_pack_emtpy(self):
- o = MemoryObjectStore()
- f, commit, abort = o.add_pack()
- commit()
-
- def test_add_thin_pack(self):
- o = MemoryObjectStore()
- blob = make_object(Blob, data=b"yummy data")
- o.add_object(blob)
-
- f = BytesIO()
- entries = build_pack(
- f,
- [
- (REF_DELTA, (blob.id, b"more yummy data")),
- ],
- store=o,
- )
- o.add_thin_pack(f.read, None)
- packed_blob_sha = sha_to_hex(entries[0][3])
- self.assertEqual(
- (Blob.type_num, b"more yummy data"), o.get_raw(packed_blob_sha)
- )
-
- def test_add_thin_pack_empty(self):
- o = MemoryObjectStore()
-
- f = BytesIO()
- entries = build_pack(f, [], store=o)
- self.assertEqual([], entries)
- o.add_thin_pack(f.read, None)
-
-
-class PackBasedObjectStoreTests(ObjectStoreTests):
- def tearDown(self):
- for pack in self.store.packs:
- pack.close()
-
- def test_empty_packs(self):
- self.assertEqual([], list(self.store.packs))
-
- def test_pack_loose_objects(self):
- b1 = make_object(Blob, data=b"yummy data")
- self.store.add_object(b1)
- b2 = make_object(Blob, data=b"more yummy data")
- self.store.add_object(b2)
- b3 = make_object(Blob, data=b"even more yummy data")
- b4 = make_object(Blob, data=b"and more yummy data")
- self.store.add_objects([(b3, None), (b4, None)])
- self.assertEqual({b1.id, b2.id, b3.id, b4.id}, set(self.store))
- self.assertEqual(1, len(self.store.packs))
- self.assertEqual(2, self.store.pack_loose_objects())
- self.assertNotEqual([], list(self.store.packs))
- self.assertEqual(0, self.store.pack_loose_objects())
-
- def test_repack(self):
- b1 = make_object(Blob, data=b"yummy data")
- self.store.add_object(b1)
- b2 = make_object(Blob, data=b"more yummy data")
- self.store.add_object(b2)
- b3 = make_object(Blob, data=b"even more yummy data")
- b4 = make_object(Blob, data=b"and more yummy data")
- self.store.add_objects([(b3, None), (b4, None)])
- b5 = make_object(Blob, data=b"and more data")
- b6 = make_object(Blob, data=b"and some more data")
- self.store.add_objects([(b5, None), (b6, None)])
- self.assertEqual({b1.id, b2.id, b3.id, b4.id, b5.id, b6.id}, set(self.store))
- self.assertEqual(2, len(self.store.packs))
- self.assertEqual(6, self.store.repack())
- self.assertEqual(1, len(self.store.packs))
- self.assertEqual(0, self.store.pack_loose_objects())
-
- def test_repack_existing(self):
- b1 = make_object(Blob, data=b"yummy data")
- self.store.add_object(b1)
- b2 = make_object(Blob, data=b"more yummy data")
- self.store.add_object(b2)
- self.store.add_objects([(b1, None), (b2, None)])
- self.store.add_objects([(b2, None)])
- self.assertEqual({b1.id, b2.id}, set(self.store))
- self.assertEqual(2, len(self.store.packs))
- self.assertEqual(2, self.store.repack())
- self.assertEqual(1, len(self.store.packs))
- self.assertEqual(0, self.store.pack_loose_objects())
-
- self.assertEqual({b1.id, b2.id}, set(self.store))
- self.assertEqual(1, len(self.store.packs))
- self.assertEqual(2, self.store.repack())
- self.assertEqual(1, len(self.store.packs))
- self.assertEqual(0, self.store.pack_loose_objects())
-
-
-class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
- def setUp(self):
- TestCase.setUp(self)
- self.store_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.store_dir)
- self.store = DiskObjectStore.init(self.store_dir)
-
- def tearDown(self):
- TestCase.tearDown(self)
- PackBasedObjectStoreTests.tearDown(self)
-
- def test_loose_compression_level(self):
- alternate_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, alternate_dir)
- alternate_store = DiskObjectStore(alternate_dir, loose_compression_level=6)
- b2 = make_object(Blob, data=b"yummy data")
- alternate_store.add_object(b2)
-
- def test_alternates(self):
- alternate_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, alternate_dir)
- alternate_store = DiskObjectStore(alternate_dir)
- b2 = make_object(Blob, data=b"yummy data")
- alternate_store.add_object(b2)
- store = DiskObjectStore(self.store_dir)
- self.assertRaises(KeyError, store.__getitem__, b2.id)
- store.add_alternate_path(alternate_dir)
- self.assertIn(b2.id, store)
- self.assertEqual(b2, store[b2.id])
-
- def test_read_alternate_paths(self):
- store = DiskObjectStore(self.store_dir)
-
- abs_path = os.path.abspath(os.path.normpath("/abspath"))
- # ensures in particular existence of the alternates file
- store.add_alternate_path(abs_path)
- self.assertEqual(set(store._read_alternate_paths()), {abs_path})
-
- store.add_alternate_path("relative-path")
- self.assertIn(
- os.path.join(store.path, "relative-path"),
- set(store._read_alternate_paths()),
- )
-
- # arguably, add_alternate_path() could strip comments.
- # Meanwhile it's more convenient to use it than to import INFODIR
- store.add_alternate_path("# comment")
- for alt_path in store._read_alternate_paths():
- self.assertNotIn("#", alt_path)
-
- def test_file_modes(self):
- self.store.add_object(testobject)
- path = self.store._get_shafile_path(testobject.id)
- mode = os.stat(path).st_mode
-
- packmode = "0o100444" if sys.platform != "win32" else "0o100666"
- self.assertEqual(oct(mode), packmode)
-
- def test_corrupted_object_raise_exception(self):
- """Corrupted sha1 disk file should raise specific exception."""
- self.store.add_object(testobject)
- self.assertEqual(
- (Blob.type_num, b"yummy data"), self.store.get_raw(testobject.id)
- )
- self.assertTrue(self.store.contains_loose(testobject.id))
- self.assertIsNotNone(self.store._get_loose_object(testobject.id))
-
- path = self.store._get_shafile_path(testobject.id)
- old_mode = os.stat(path).st_mode
- os.chmod(path, 0o600)
- with open(path, "wb") as f: # corrupt the file
- f.write(b"")
- os.chmod(path, old_mode)
-
- expected_error_msg = "Corrupted empty file detected"
- try:
- self.store.contains_loose(testobject.id)
- except EmptyFileException as e:
- self.assertEqual(str(e), expected_error_msg)
-
- try:
- self.store._get_loose_object(testobject.id)
- except EmptyFileException as e:
- self.assertEqual(str(e), expected_error_msg)
-
- # this does not change iteration on loose objects though
- self.assertEqual([testobject.id], list(self.store._iter_loose_objects()))
-
- def test_tempfile_in_loose_store(self):
- self.store.add_object(testobject)
- self.assertEqual([testobject.id], list(self.store._iter_loose_objects()))
-
- # add temporary files to the loose store
- for i in range(256):
- dirname = os.path.join(self.store_dir, "%02x" % i)
- if not os.path.isdir(dirname):
- os.makedirs(dirname)
- fd, n = tempfile.mkstemp(prefix="tmp_obj_", dir=dirname)
- os.close(fd)
-
- self.assertEqual([testobject.id], list(self.store._iter_loose_objects()))
-
- def test_add_alternate_path(self):
- store = DiskObjectStore(self.store_dir)
- self.assertEqual([], list(store._read_alternate_paths()))
- store.add_alternate_path("/foo/path")
- self.assertEqual(["/foo/path"], list(store._read_alternate_paths()))
- store.add_alternate_path("/bar/path")
- self.assertEqual(
- ["/foo/path", "/bar/path"], list(store._read_alternate_paths())
- )
-
- def test_rel_alternative_path(self):
- alternate_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, alternate_dir)
- alternate_store = DiskObjectStore(alternate_dir)
- b2 = make_object(Blob, data=b"yummy data")
- alternate_store.add_object(b2)
- store = DiskObjectStore(self.store_dir)
- self.assertRaises(KeyError, store.__getitem__, b2.id)
- store.add_alternate_path(os.path.relpath(alternate_dir, self.store_dir))
- self.assertEqual(list(alternate_store), list(store.alternates[0]))
- self.assertIn(b2.id, store)
- self.assertEqual(b2, store[b2.id])
-
- def test_pack_dir(self):
- o = DiskObjectStore(self.store_dir)
- self.assertEqual(os.path.join(self.store_dir, "pack"), o.pack_dir)
-
- def test_add_pack(self):
- o = DiskObjectStore(self.store_dir)
- self.addCleanup(o.close)
- f, commit, abort = o.add_pack()
- try:
- b = make_object(Blob, data=b"more yummy data")
- write_pack_objects(f.write, [(b, None)])
- except BaseException:
- abort()
- raise
- else:
- commit()
-
- def test_add_thin_pack(self):
- o = DiskObjectStore(self.store_dir)
- try:
- blob = make_object(Blob, data=b"yummy data")
- o.add_object(blob)
-
- f = BytesIO()
- entries = build_pack(
- f,
- [
- (REF_DELTA, (blob.id, b"more yummy data")),
- ],
- store=o,
- )
-
- with o.add_thin_pack(f.read, None) as pack:
- packed_blob_sha = sha_to_hex(entries[0][3])
- pack.check_length_and_checksum()
- self.assertEqual(sorted([blob.id, packed_blob_sha]), list(pack))
- self.assertTrue(o.contains_packed(packed_blob_sha))
- self.assertTrue(o.contains_packed(blob.id))
- self.assertEqual(
- (Blob.type_num, b"more yummy data"),
- o.get_raw(packed_blob_sha),
- )
- finally:
- o.close()
-
- def test_add_thin_pack_empty(self):
- with closing(DiskObjectStore(self.store_dir)) as o:
- f = BytesIO()
- entries = build_pack(f, [], store=o)
- self.assertEqual([], entries)
- o.add_thin_pack(f.read, None)
-
-
-class TreeLookupPathTests(TestCase):
- def setUp(self):
- TestCase.setUp(self)
- self.store = MemoryObjectStore()
- blob_a = make_object(Blob, data=b"a")
- blob_b = make_object(Blob, data=b"b")
- blob_c = make_object(Blob, data=b"c")
- for blob in [blob_a, blob_b, blob_c]:
- self.store.add_object(blob)
-
- blobs = [
- (b"a", blob_a.id, 0o100644),
- (b"ad/b", blob_b.id, 0o100644),
- (b"ad/bd/c", blob_c.id, 0o100755),
- (b"ad/c", blob_c.id, 0o100644),
- (b"c", blob_c.id, 0o100644),
- (b"d", blob_c.id, S_IFGITLINK),
- ]
- self.tree_id = commit_tree(self.store, blobs)
-
- def get_object(self, sha):
- return self.store[sha]
-
- def test_lookup_blob(self):
- o_id = tree_lookup_path(self.get_object, self.tree_id, b"a")[1]
- self.assertIsInstance(self.store[o_id], Blob)
-
- def test_lookup_tree(self):
- o_id = tree_lookup_path(self.get_object, self.tree_id, b"ad")[1]
- self.assertIsInstance(self.store[o_id], Tree)
- o_id = tree_lookup_path(self.get_object, self.tree_id, b"ad/bd")[1]
- self.assertIsInstance(self.store[o_id], Tree)
- o_id = tree_lookup_path(self.get_object, self.tree_id, b"ad/bd/")[1]
- self.assertIsInstance(self.store[o_id], Tree)
-
- def test_lookup_submodule(self):
- tree_lookup_path(self.get_object, self.tree_id, b"d")[1]
- self.assertRaises(
- SubmoduleEncountered,
- tree_lookup_path,
- self.get_object,
- self.tree_id,
- b"d/a",
- )
-
- def test_lookup_nonexistent(self):
- self.assertRaises(
- KeyError, tree_lookup_path, self.get_object, self.tree_id, b"j"
- )
-
- def test_lookup_not_tree(self):
- self.assertRaises(
- NotTreeError,
- tree_lookup_path,
- self.get_object,
- self.tree_id,
- b"ad/b/j",
- )
-
-
-class ObjectStoreGraphWalkerTests(TestCase):
- def get_walker(self, heads, parent_map):
- new_parent_map = {
- k * 40: [(p * 40) for p in ps] for (k, ps) in parent_map.items()
- }
- return ObjectStoreGraphWalker(
- [x * 40 for x in heads], new_parent_map.__getitem__
- )
-
- def test_ack_invalid_value(self):
- gw = self.get_walker([], {})
- self.assertRaises(ValueError, gw.ack, "tooshort")
-
- def test_empty(self):
- gw = self.get_walker([], {})
- self.assertIs(None, next(gw))
- gw.ack(b"a" * 40)
- self.assertIs(None, next(gw))
-
- def test_descends(self):
- gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": []})
- self.assertEqual(b"a" * 40, next(gw))
- self.assertEqual(b"b" * 40, next(gw))
-
- def test_present(self):
- gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": []})
- gw.ack(b"a" * 40)
- self.assertIs(None, next(gw))
-
- def test_parent_present(self):
- gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": []})
- self.assertEqual(b"a" * 40, next(gw))
- gw.ack(b"a" * 40)
- self.assertIs(None, next(gw))
-
- def test_child_ack_later(self):
- gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": [b"c"], b"c": []})
- self.assertEqual(b"a" * 40, next(gw))
- self.assertEqual(b"b" * 40, next(gw))
- gw.ack(b"a" * 40)
- self.assertIs(None, next(gw))
-
- def test_only_once(self):
- # a b
- # | |
- # c d
- # \ /
- # e
- gw = self.get_walker(
- [b"a", b"b"],
- {
- b"a": [b"c"],
- b"b": [b"d"],
- b"c": [b"e"],
- b"d": [b"e"],
- b"e": [],
- },
- )
- walk = []
- acked = False
- walk.append(next(gw))
- walk.append(next(gw))
- # A branch (a, c) or (b, d) may be done after 2 steps or 3 depending on
- # the order walked: 3-step walks include (a, b, c) and (b, a, d), etc.
- if walk == [b"a" * 40, b"c" * 40] or walk == [b"b" * 40, b"d" * 40]:
- gw.ack(walk[0])
- acked = True
-
- walk.append(next(gw))
- if not acked and walk[2] == b"c" * 40:
- gw.ack(b"a" * 40)
- elif not acked and walk[2] == b"d" * 40:
- gw.ack(b"b" * 40)
- walk.append(next(gw))
- self.assertIs(None, next(gw))
-
- self.assertEqual([b"a" * 40, b"b" * 40, b"c" * 40, b"d" * 40], sorted(walk))
- self.assertLess(walk.index(b"a" * 40), walk.index(b"c" * 40))
- self.assertLess(walk.index(b"b" * 40), walk.index(b"d" * 40))
-
-
-class CommitTreeChangesTests(TestCase):
- def setUp(self):
- super().setUp()
- self.store = MemoryObjectStore()
- self.blob_a = make_object(Blob, data=b"a")
- self.blob_b = make_object(Blob, data=b"b")
- self.blob_c = make_object(Blob, data=b"c")
- for blob in [self.blob_a, self.blob_b, self.blob_c]:
- self.store.add_object(blob)
-
- blobs = [
- (b"a", self.blob_a.id, 0o100644),
- (b"ad/b", self.blob_b.id, 0o100644),
- (b"ad/bd/c", self.blob_c.id, 0o100755),
- (b"ad/c", self.blob_c.id, 0o100644),
- (b"c", self.blob_c.id, 0o100644),
- ]
- self.tree_id = commit_tree(self.store, blobs)
-
- def test_no_changes(self):
- self.assertEqual(
- self.store[self.tree_id],
- commit_tree_changes(self.store, self.store[self.tree_id], []),
- )
-
- def test_add_blob(self):
- blob_d = make_object(Blob, data=b"d")
- new_tree = commit_tree_changes(
- self.store, self.store[self.tree_id], [(b"d", 0o100644, blob_d.id)]
- )
- self.assertEqual(
- new_tree[b"d"],
- (33188, b"c59d9b6344f1af00e504ba698129f07a34bbed8d"),
- )
-
- def test_add_blob_in_dir(self):
- blob_d = make_object(Blob, data=b"d")
- new_tree = commit_tree_changes(
- self.store,
- self.store[self.tree_id],
- [(b"e/f/d", 0o100644, blob_d.id)],
- )
- self.assertEqual(
- new_tree.items(),
- [
- TreeEntry(path=b"a", mode=stat.S_IFREG | 0o100644, sha=self.blob_a.id),
- TreeEntry(
- path=b"ad",
- mode=stat.S_IFDIR,
- sha=b"0e2ce2cd7725ff4817791be31ccd6e627e801f4a",
- ),
- TreeEntry(path=b"c", mode=stat.S_IFREG | 0o100644, sha=self.blob_c.id),
- TreeEntry(
- path=b"e",
- mode=stat.S_IFDIR,
- sha=b"6ab344e288724ac2fb38704728b8896e367ed108",
- ),
- ],
- )
- e_tree = self.store[new_tree[b"e"][1]]
- self.assertEqual(
- e_tree.items(),
- [
- TreeEntry(
- path=b"f",
- mode=stat.S_IFDIR,
- sha=b"24d2c94d8af232b15a0978c006bf61ef4479a0a5",
- )
- ],
- )
- f_tree = self.store[e_tree[b"f"][1]]
- self.assertEqual(
- f_tree.items(),
- [TreeEntry(path=b"d", mode=stat.S_IFREG | 0o100644, sha=blob_d.id)],
- )
-
- def test_delete_blob(self):
- new_tree = commit_tree_changes(
- self.store, self.store[self.tree_id], [(b"ad/bd/c", None, None)]
- )
- self.assertEqual(set(new_tree), {b"a", b"ad", b"c"})
- ad_tree = self.store[new_tree[b"ad"][1]]
- self.assertEqual(set(ad_tree), {b"b", b"c"})
-
-
-class TestReadPacksFile(TestCase):
- def test_read_packs(self):
- self.assertEqual(
- ["pack-1.pack"],
- list(
- read_packs_file(
- BytesIO(
- b"""P pack-1.pack
-"""
- )
- )
- ),
- )
blob - 8b8b5b5e6e6daaba5eb32206f70b5a1978a2c6ce (mode 644)
blob + /dev/null
--- dulwich/tests/test_objects.py
+++ /dev/null
-# test_objects.py -- tests for objects.py
-# Copyright (C) 2007 James Westby <jw+debian@jameswestby.net>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for git base objects."""
-
-# TODO: Round-trip parse-serialize-parse and serialize-parse-serialize tests.
-
-import datetime
-import os
-import stat
-from contextlib import contextmanager
-from io import BytesIO
-from itertools import permutations
-
-from dulwich.tests import TestCase
-
-from ..errors import ObjectFormatException
-from ..objects import (
- MAX_TIME,
- Blob,
- Commit,
- ShaFile,
- Tag,
- Tree,
- TreeEntry,
- _parse_tree_py,
- _sorted_tree_items_py,
- check_hexsha,
- check_identity,
- format_timezone,
- hex_to_filename,
- hex_to_sha,
- object_class,
- parse_timezone,
- parse_tree,
- pretty_format_tree_entry,
- sha_to_hex,
- sorted_tree_items,
-)
-from .utils import ext_functest_builder, functest_builder, make_commit, make_object
-
-a_sha = b"6f670c0fb53f9463760b7295fbb814e965fb20c8"
-b_sha = b"2969be3e8ee1c0222396a5611407e4769f14e54b"
-c_sha = b"954a536f7819d40e6f637f849ee187dd10066349"
-tree_sha = b"70c190eb48fa8bbb50ddc692a17b44cb781af7f6"
-tag_sha = b"71033db03a03c6a36721efcf1968dd8f8e0cf023"
-
-
-class TestHexToSha(TestCase):
- def test_simple(self):
- self.assertEqual(b"\xab\xcd" * 10, hex_to_sha(b"abcd" * 10))
-
- def test_reverse(self):
- self.assertEqual(b"abcd" * 10, sha_to_hex(b"\xab\xcd" * 10))
-
-
-class BlobReadTests(TestCase):
- """Test decompression of blobs."""
-
- def get_sha_file(self, cls, base, sha):
- dir = os.path.join(os.path.dirname(__file__), "..", "..", "testdata", base)
- return cls.from_path(hex_to_filename(dir, sha))
-
- def get_blob(self, sha):
- """Return the blob named sha from the test data dir."""
- return self.get_sha_file(Blob, "blobs", sha)
-
- def get_tree(self, sha):
- return self.get_sha_file(Tree, "trees", sha)
-
- def get_tag(self, sha):
- return self.get_sha_file(Tag, "tags", sha)
-
- def commit(self, sha):
- return self.get_sha_file(Commit, "commits", sha)
-
- def test_decompress_simple_blob(self):
- b = self.get_blob(a_sha)
- self.assertEqual(b.data, b"test 1\n")
- self.assertEqual(b.sha().hexdigest().encode("ascii"), a_sha)
-
- def test_hash(self):
- b = self.get_blob(a_sha)
- self.assertEqual(hash(b.id), hash(b))
-
- def test_parse_empty_blob_object(self):
- sha = b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"
- b = self.get_blob(sha)
- self.assertEqual(b.data, b"")
- self.assertEqual(b.id, sha)
- self.assertEqual(b.sha().hexdigest().encode("ascii"), sha)
-
- def test_create_blob_from_string(self):
- string = b"test 2\n"
- b = Blob.from_string(string)
- self.assertEqual(b.data, string)
- self.assertEqual(b.sha().hexdigest().encode("ascii"), b_sha)
-
- def test_legacy_from_file(self):
- b1 = Blob.from_string(b"foo")
- b_raw = b1.as_legacy_object()
- b2 = b1.from_file(BytesIO(b_raw))
- self.assertEqual(b1, b2)
-
- def test_legacy_from_file_compression_level(self):
- b1 = Blob.from_string(b"foo")
- b_raw = b1.as_legacy_object(compression_level=6)
- b2 = b1.from_file(BytesIO(b_raw))
- self.assertEqual(b1, b2)
-
- def test_chunks(self):
- string = b"test 5\n"
- b = Blob.from_string(string)
- self.assertEqual([string], b.chunked)
-
- def test_splitlines(self):
- for case in [
- [],
- [b"foo\nbar\n"],
- [b"bl\na", b"blie"],
- [b"bl\na", b"blie", b"bloe\n"],
- [b"", b"bl\na", b"blie", b"bloe\n"],
- [b"", b"", b"", b"bla\n"],
- [b"", b"", b"", b"bla\n", b""],
- [b"bl", b"", b"a\naaa"],
- [b"a\naaa", b"a"],
- ]:
- b = Blob()
- b.chunked = case
- self.assertEqual(b.data.splitlines(True), b.splitlines())
-
- def test_set_chunks(self):
- b = Blob()
- b.chunked = [b"te", b"st", b" 5\n"]
- self.assertEqual(b"test 5\n", b.data)
- b.chunked = [b"te", b"st", b" 6\n"]
- self.assertEqual(b"test 6\n", b.as_raw_string())
- self.assertEqual(b"test 6\n", bytes(b))
-
- def test_parse_legacy_blob(self):
- string = b"test 3\n"
- b = self.get_blob(c_sha)
- self.assertEqual(b.data, string)
- self.assertEqual(b.sha().hexdigest().encode("ascii"), c_sha)
-
- def test_eq(self):
- blob1 = self.get_blob(a_sha)
- blob2 = self.get_blob(a_sha)
- self.assertEqual(blob1, blob2)
-
- def test_read_tree_from_file(self):
- t = self.get_tree(tree_sha)
- self.assertEqual(t.items()[0], (b"a", 33188, a_sha))
- self.assertEqual(t.items()[1], (b"b", 33188, b_sha))
-
- def test_read_tree_from_file_parse_count(self):
- old_deserialize = Tree._deserialize
-
- def reset_deserialize():
- Tree._deserialize = old_deserialize
-
- self.addCleanup(reset_deserialize)
- self.deserialize_count = 0
-
- def counting_deserialize(*args, **kwargs):
- self.deserialize_count += 1
- return old_deserialize(*args, **kwargs)
-
- Tree._deserialize = counting_deserialize
- t = self.get_tree(tree_sha)
- self.assertEqual(t.items()[0], (b"a", 33188, a_sha))
- self.assertEqual(t.items()[1], (b"b", 33188, b_sha))
- self.assertEqual(self.deserialize_count, 1)
-
- def test_read_tag_from_file(self):
- t = self.get_tag(tag_sha)
- self.assertEqual(
- t.object, (Commit, b"51b668fd5bf7061b7d6fa525f88803e6cfadaa51")
- )
- self.assertEqual(t.name, b"signed")
- self.assertEqual(t.tagger, b"Ali Sabil <ali.sabil@gmail.com>")
- self.assertEqual(t.tag_time, 1231203091)
- self.assertEqual(t.message, b"This is a signed tag\n")
- self.assertEqual(
- t.signature,
- b"-----BEGIN PGP SIGNATURE-----\n"
- b"Version: GnuPG v1.4.9 (GNU/Linux)\n"
- b"\n"
- b"iEYEABECAAYFAkliqx8ACgkQqSMmLy9u/"
- b"kcx5ACfakZ9NnPl02tOyYP6pkBoEkU1\n"
- b"5EcAn0UFgokaSvS371Ym/4W9iJj6vh3h\n"
- b"=ql7y\n"
- b"-----END PGP SIGNATURE-----\n",
- )
-
- def test_read_commit_from_file(self):
- sha = b"60dacdc733de308bb77bb76ce0fb0f9b44c9769e"
- c = self.commit(sha)
- self.assertEqual(c.tree, tree_sha)
- self.assertEqual(c.parents, [b"0d89f20333fbb1d2f3a94da77f4981373d8f4310"])
- self.assertEqual(c.author, b"James Westby <jw+debian@jameswestby.net>")
- self.assertEqual(c.committer, b"James Westby <jw+debian@jameswestby.net>")
- self.assertEqual(c.commit_time, 1174759230)
- self.assertEqual(c.commit_timezone, 0)
- self.assertEqual(c.author_timezone, 0)
- self.assertEqual(c.message, b"Test commit\n")
-
- def test_read_commit_no_parents(self):
- sha = b"0d89f20333fbb1d2f3a94da77f4981373d8f4310"
- c = self.commit(sha)
- self.assertEqual(c.tree, b"90182552c4a85a45ec2a835cadc3451bebdfe870")
- self.assertEqual(c.parents, [])
- self.assertEqual(c.author, b"James Westby <jw+debian@jameswestby.net>")
- self.assertEqual(c.committer, b"James Westby <jw+debian@jameswestby.net>")
- self.assertEqual(c.commit_time, 1174758034)
- self.assertEqual(c.commit_timezone, 0)
- self.assertEqual(c.author_timezone, 0)
- self.assertEqual(c.message, b"Test commit\n")
-
- def test_read_commit_two_parents(self):
- sha = b"5dac377bdded4c9aeb8dff595f0faeebcc8498cc"
- c = self.commit(sha)
- self.assertEqual(c.tree, b"d80c186a03f423a81b39df39dc87fd269736ca86")
- self.assertEqual(
- c.parents,
- [
- b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd",
- b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6",
- ],
- )
- self.assertEqual(c.author, b"James Westby <jw+debian@jameswestby.net>")
- self.assertEqual(c.committer, b"James Westby <jw+debian@jameswestby.net>")
- self.assertEqual(c.commit_time, 1174773719)
- self.assertEqual(c.commit_timezone, 0)
- self.assertEqual(c.author_timezone, 0)
- self.assertEqual(c.message, b"Merge ../b\n")
-
- def test_stub_sha(self):
- sha = b"5" * 40
- c = make_commit(id=sha, message=b"foo")
- self.assertIsInstance(c, Commit)
- self.assertEqual(sha, c.id)
- self.assertNotEqual(sha, c.sha())
-
-
-class ShaFileCheckTests(TestCase):
- def assertCheckFails(self, cls, data):
- obj = cls()
-
- def do_check():
- obj.set_raw_string(data)
- obj.check()
-
- self.assertRaises(ObjectFormatException, do_check)
-
- def assertCheckSucceeds(self, cls, data):
- obj = cls()
- obj.set_raw_string(data)
- self.assertEqual(None, obj.check())
-
-
-small_buffer_zlib_object = (
- b"\x48\x89\x15\xcc\x31\x0e\xc2\x30\x0c\x40\x51\xe6"
- b"\x9c\xc2\x3b\xaa\x64\x37\xc4\xc1\x12\x42\x5c\xc5"
- b"\x49\xac\x52\xd4\x92\xaa\x78\xe1\xf6\x94\xed\xeb"
- b"\x0d\xdf\x75\x02\xa2\x7c\xea\xe5\x65\xd5\x81\x8b"
- b"\x9a\x61\xba\xa0\xa9\x08\x36\xc9\x4c\x1a\xad\x88"
- b"\x16\xba\x46\xc4\xa8\x99\x6a\x64\xe1\xe0\xdf\xcd"
- b"\xa0\xf6\x75\x9d\x3d\xf8\xf1\xd0\x77\xdb\xfb\xdc"
- b"\x86\xa3\x87\xf1\x2f\x93\xed\x00\xb7\xc7\xd2\xab"
- b"\x2e\xcf\xfe\xf1\x3b\x50\xa4\x91\x53\x12\x24\x38"
- b"\x23\x21\x86\xf0\x03\x2f\x91\x24\x52"
-)
-
-
-class ShaFileTests(TestCase):
- def test_deflated_smaller_window_buffer(self):
- # zlib on some systems uses smaller buffers,
- # resulting in a different header.
- # See https://github.com/libgit2/libgit2/pull/464
- sf = ShaFile.from_file(BytesIO(small_buffer_zlib_object))
- self.assertEqual(sf.type_name, b"tag")
- self.assertEqual(sf.tagger, b" <@localhost>")
-
-
-class CommitSerializationTests(TestCase):
- def make_commit(self, **kwargs):
- attrs = {
- "tree": b"d80c186a03f423a81b39df39dc87fd269736ca86",
- "parents": [
- b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd",
- b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6",
- ],
- "author": b"James Westby <jw+debian@jameswestby.net>",
- "committer": b"James Westby <jw+debian@jameswestby.net>",
- "commit_time": 1174773719,
- "author_time": 1174773719,
- "commit_timezone": 0,
- "author_timezone": 0,
- "message": b"Merge ../b\n",
- }
- attrs.update(kwargs)
- return make_commit(**attrs)
-
- def test_encoding(self):
- c = self.make_commit(encoding=b"iso8859-1")
- self.assertIn(b"encoding iso8859-1\n", c.as_raw_string())
-
- def test_short_timestamp(self):
- c = self.make_commit(commit_time=30)
- c1 = Commit()
- c1.set_raw_string(c.as_raw_string())
- self.assertEqual(30, c1.commit_time)
-
- def test_full_tree(self):
- c = self.make_commit(commit_time=30)
- t = Tree()
- t.add(b"data-x", 0o644, Blob().id)
- c.tree = t
- c1 = Commit()
- c1.set_raw_string(c.as_raw_string())
- self.assertEqual(t.id, c1.tree)
- self.assertEqual(c.as_raw_string(), c1.as_raw_string())
-
- def test_raw_length(self):
- c = self.make_commit()
- self.assertEqual(len(c.as_raw_string()), c.raw_length())
-
- def test_simple(self):
- c = self.make_commit()
- self.assertEqual(c.id, b"5dac377bdded4c9aeb8dff595f0faeebcc8498cc")
- self.assertEqual(
- b"tree d80c186a03f423a81b39df39dc87fd269736ca86\n"
- b"parent ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd\n"
- b"parent 4cffe90e0a41ad3f5190079d7c8f036bde29cbe6\n"
- b"author James Westby <jw+debian@jameswestby.net> "
- b"1174773719 +0000\n"
- b"committer James Westby <jw+debian@jameswestby.net> "
- b"1174773719 +0000\n"
- b"\n"
- b"Merge ../b\n",
- c.as_raw_string(),
- )
-
- def test_timezone(self):
- c = self.make_commit(commit_timezone=(5 * 60))
- self.assertIn(b" +0005\n", c.as_raw_string())
-
- def test_neg_timezone(self):
- c = self.make_commit(commit_timezone=(-1 * 3600))
- self.assertIn(b" -0100\n", c.as_raw_string())
-
- def test_deserialize(self):
- c = self.make_commit()
- d = Commit()
- d._deserialize(c.as_raw_chunks())
- self.assertEqual(c, d)
-
- def test_serialize_gpgsig(self):
- commit = self.make_commit(
- gpgsig=b"""-----BEGIN PGP SIGNATURE-----
-Version: GnuPG v1
-
-iQIcBAABCgAGBQJULCdfAAoJEACAbyvXKaRXuKwP/RyP9PA49uAvu8tQVCC/uBa8
-vi975+xvO14R8Pp8k2nps7lSxCdtCd+xVT1VRHs0wNhOZo2YCVoU1HATkPejqSeV
-NScTHcxnk4/+bxyfk14xvJkNp7FlQ3npmBkA+lbV0Ubr33rvtIE5jiJPyz+SgWAg
-xdBG2TojV0squj00GoH/euK6aX7GgZtwdtpTv44haCQdSuPGDcI4TORqR6YSqvy3
-GPE+3ZqXPFFb+KILtimkxitdwB7CpwmNse2vE3rONSwTvi8nq3ZoQYNY73CQGkUy
-qoFU0pDtw87U3niFin1ZccDgH0bB6624sLViqrjcbYJeg815Htsu4rmzVaZADEVC
-XhIO4MThebusdk0AcNGjgpf3HRHk0DPMDDlIjm+Oao0cqovvF6VyYmcb0C+RmhJj
-dodLXMNmbqErwTk3zEkW0yZvNIYXH7m9SokPCZa4eeIM7be62X6h1mbt0/IU6Th+
-v18fS0iTMP/Viug5und+05C/v04kgDo0CPphAbXwWMnkE4B6Tl9sdyUYXtvQsL7x
-0+WP1gL27ANqNZiI07Kz/BhbBAQI/+2TFT7oGr0AnFPQ5jHp+3GpUf6OKuT1wT3H
-ND189UFuRuubxb42vZhpcXRbqJVWnbECTKVUPsGZqat3enQUB63uM4i6/RdONDZA
-fDeF1m4qYs+cUXKNUZ03
-=X6RT
------END PGP SIGNATURE-----"""
- )
- self.maxDiff = None
- self.assertEqual(
- b"""\
-tree d80c186a03f423a81b39df39dc87fd269736ca86
-parent ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd
-parent 4cffe90e0a41ad3f5190079d7c8f036bde29cbe6
-author James Westby <jw+debian@jameswestby.net> 1174773719 +0000
-committer James Westby <jw+debian@jameswestby.net> 1174773719 +0000
-gpgsig -----BEGIN PGP SIGNATURE-----
- Version: GnuPG v1
-
- iQIcBAABCgAGBQJULCdfAAoJEACAbyvXKaRXuKwP/RyP9PA49uAvu8tQVCC/uBa8
- vi975+xvO14R8Pp8k2nps7lSxCdtCd+xVT1VRHs0wNhOZo2YCVoU1HATkPejqSeV
- NScTHcxnk4/+bxyfk14xvJkNp7FlQ3npmBkA+lbV0Ubr33rvtIE5jiJPyz+SgWAg
- xdBG2TojV0squj00GoH/euK6aX7GgZtwdtpTv44haCQdSuPGDcI4TORqR6YSqvy3
- GPE+3ZqXPFFb+KILtimkxitdwB7CpwmNse2vE3rONSwTvi8nq3ZoQYNY73CQGkUy
- qoFU0pDtw87U3niFin1ZccDgH0bB6624sLViqrjcbYJeg815Htsu4rmzVaZADEVC
- XhIO4MThebusdk0AcNGjgpf3HRHk0DPMDDlIjm+Oao0cqovvF6VyYmcb0C+RmhJj
- dodLXMNmbqErwTk3zEkW0yZvNIYXH7m9SokPCZa4eeIM7be62X6h1mbt0/IU6Th+
- v18fS0iTMP/Viug5und+05C/v04kgDo0CPphAbXwWMnkE4B6Tl9sdyUYXtvQsL7x
- 0+WP1gL27ANqNZiI07Kz/BhbBAQI/+2TFT7oGr0AnFPQ5jHp+3GpUf6OKuT1wT3H
- ND189UFuRuubxb42vZhpcXRbqJVWnbECTKVUPsGZqat3enQUB63uM4i6/RdONDZA
- fDeF1m4qYs+cUXKNUZ03
- =X6RT
- -----END PGP SIGNATURE-----
-
-Merge ../b
-""",
- commit.as_raw_string(),
- )
-
- def test_serialize_mergetag(self):
- tag = make_object(
- Tag,
- object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"),
- object_type_name=b"commit",
- name=b"v2.6.22-rc7",
- tag_time=1183319674,
- tag_timezone=0,
- tagger=b"Linus Torvalds <torvalds@woody.linux-foundation.org>",
- message=default_message,
- )
- commit = self.make_commit(mergetag=[tag])
-
- self.assertEqual(
- b"""tree d80c186a03f423a81b39df39dc87fd269736ca86
-parent ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd
-parent 4cffe90e0a41ad3f5190079d7c8f036bde29cbe6
-author James Westby <jw+debian@jameswestby.net> 1174773719 +0000
-committer James Westby <jw+debian@jameswestby.net> 1174773719 +0000
-mergetag object a38d6181ff27824c79fc7df825164a212eff6a3f
- type commit
- tag v2.6.22-rc7
- tagger Linus Torvalds <torvalds@woody.linux-foundation.org> 1183319674 +0000
-
- Linux 2.6.22-rc7
- -----BEGIN PGP SIGNATURE-----
- Version: GnuPG v1.4.7 (GNU/Linux)
-
- iD8DBQBGiAaAF3YsRnbiHLsRAitMAKCiLboJkQECM/jpYsY3WPfvUgLXkACgg3ql
- OK2XeQOiEeXtT76rV4t2WR4=
- =ivrA
- -----END PGP SIGNATURE-----
-
-Merge ../b
-""",
- commit.as_raw_string(),
- )
-
- def test_serialize_mergetags(self):
- tag = make_object(
- Tag,
- object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"),
- object_type_name=b"commit",
- name=b"v2.6.22-rc7",
- tag_time=1183319674,
- tag_timezone=0,
- tagger=b"Linus Torvalds <torvalds@woody.linux-foundation.org>",
- message=default_message,
- )
- commit = self.make_commit(mergetag=[tag, tag])
-
- self.assertEqual(
- b"""tree d80c186a03f423a81b39df39dc87fd269736ca86
-parent ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd
-parent 4cffe90e0a41ad3f5190079d7c8f036bde29cbe6
-author James Westby <jw+debian@jameswestby.net> 1174773719 +0000
-committer James Westby <jw+debian@jameswestby.net> 1174773719 +0000
-mergetag object a38d6181ff27824c79fc7df825164a212eff6a3f
- type commit
- tag v2.6.22-rc7
- tagger Linus Torvalds <torvalds@woody.linux-foundation.org> 1183319674 +0000
-
- Linux 2.6.22-rc7
- -----BEGIN PGP SIGNATURE-----
- Version: GnuPG v1.4.7 (GNU/Linux)
-
- iD8DBQBGiAaAF3YsRnbiHLsRAitMAKCiLboJkQECM/jpYsY3WPfvUgLXkACgg3ql
- OK2XeQOiEeXtT76rV4t2WR4=
- =ivrA
- -----END PGP SIGNATURE-----
-mergetag object a38d6181ff27824c79fc7df825164a212eff6a3f
- type commit
- tag v2.6.22-rc7
- tagger Linus Torvalds <torvalds@woody.linux-foundation.org> 1183319674 +0000
-
- Linux 2.6.22-rc7
- -----BEGIN PGP SIGNATURE-----
- Version: GnuPG v1.4.7 (GNU/Linux)
-
- iD8DBQBGiAaAF3YsRnbiHLsRAitMAKCiLboJkQECM/jpYsY3WPfvUgLXkACgg3ql
- OK2XeQOiEeXtT76rV4t2WR4=
- =ivrA
- -----END PGP SIGNATURE-----
-
-Merge ../b
-""",
- commit.as_raw_string(),
- )
-
- def test_deserialize_mergetag(self):
- tag = make_object(
- Tag,
- object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"),
- object_type_name=b"commit",
- name=b"v2.6.22-rc7",
- tag_time=1183319674,
- tag_timezone=0,
- tagger=b"Linus Torvalds <torvalds@woody.linux-foundation.org>",
- message=default_message,
- )
- commit = self.make_commit(mergetag=[tag])
-
- d = Commit()
- d._deserialize(commit.as_raw_chunks())
- self.assertEqual(commit, d)
-
- def test_deserialize_mergetags(self):
- tag = make_object(
- Tag,
- object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"),
- object_type_name=b"commit",
- name=b"v2.6.22-rc7",
- tag_time=1183319674,
- tag_timezone=0,
- tagger=b"Linus Torvalds <torvalds@woody.linux-foundation.org>",
- message=default_message,
- )
- commit = self.make_commit(mergetag=[tag, tag])
-
- d = Commit()
- d._deserialize(commit.as_raw_chunks())
- self.assertEqual(commit, d)
-
-
-default_committer = b"James Westby <jw+debian@jameswestby.net> 1174773719 +0000"
-
-
-class CommitParseTests(ShaFileCheckTests):
- def make_commit_lines(
- self,
- tree=b"d80c186a03f423a81b39df39dc87fd269736ca86",
- parents=[
- b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd",
- b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6",
- ],
- author=default_committer,
- committer=default_committer,
- encoding=None,
- message=b"Merge ../b\n",
- extra=None,
- ):
- lines = []
- if tree is not None:
- lines.append(b"tree " + tree)
- if parents is not None:
- lines.extend(b"parent " + p for p in parents)
- if author is not None:
- lines.append(b"author " + author)
- if committer is not None:
- lines.append(b"committer " + committer)
- if encoding is not None:
- lines.append(b"encoding " + encoding)
- if extra is not None:
- for name, value in sorted(extra.items()):
- lines.append(name + b" " + value)
- lines.append(b"")
- if message is not None:
- lines.append(message)
- return lines
-
- def make_commit_text(self, **kwargs):
- return b"\n".join(self.make_commit_lines(**kwargs))
-
- def test_simple(self):
- c = Commit.from_string(self.make_commit_text())
- self.assertEqual(b"Merge ../b\n", c.message)
- self.assertEqual(b"James Westby <jw+debian@jameswestby.net>", c.author)
- self.assertEqual(b"James Westby <jw+debian@jameswestby.net>", c.committer)
- self.assertEqual(b"d80c186a03f423a81b39df39dc87fd269736ca86", c.tree)
- self.assertEqual(
- [
- b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd",
- b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6",
- ],
- c.parents,
- )
- expected_time = datetime.datetime(2007, 3, 24, 22, 1, 59)
- self.assertEqual(
- expected_time, datetime.datetime.utcfromtimestamp(c.commit_time)
- )
- self.assertEqual(0, c.commit_timezone)
- self.assertEqual(
- expected_time, datetime.datetime.utcfromtimestamp(c.author_time)
- )
- self.assertEqual(0, c.author_timezone)
- self.assertEqual(None, c.encoding)
-
- def test_custom(self):
- c = Commit.from_string(self.make_commit_text(extra={b"extra-field": b"data"}))
- self.assertEqual([(b"extra-field", b"data")], c._extra)
-
- def test_encoding(self):
- c = Commit.from_string(self.make_commit_text(encoding=b"UTF-8"))
- self.assertEqual(b"UTF-8", c.encoding)
-
- def test_check(self):
- self.assertCheckSucceeds(Commit, self.make_commit_text())
- self.assertCheckSucceeds(Commit, self.make_commit_text(parents=None))
- self.assertCheckSucceeds(Commit, self.make_commit_text(encoding=b"UTF-8"))
-
- self.assertCheckFails(Commit, self.make_commit_text(tree=b"xxx"))
- self.assertCheckFails(Commit, self.make_commit_text(parents=[a_sha, b"xxx"]))
- bad_committer = b"some guy without an email address 1174773719 +0000"
- self.assertCheckFails(Commit, self.make_commit_text(committer=bad_committer))
- self.assertCheckFails(Commit, self.make_commit_text(author=bad_committer))
- self.assertCheckFails(Commit, self.make_commit_text(author=None))
- self.assertCheckFails(Commit, self.make_commit_text(committer=None))
- self.assertCheckFails(
- Commit, self.make_commit_text(author=None, committer=None)
- )
-
- def test_check_duplicates(self):
- # duplicate each of the header fields
- for i in range(5):
- lines = self.make_commit_lines(parents=[a_sha], encoding=b"UTF-8")
- lines.insert(i, lines[i])
- text = b"\n".join(lines)
- if lines[i].startswith(b"parent"):
- # duplicate parents are ok for now
- self.assertCheckSucceeds(Commit, text)
- else:
- self.assertCheckFails(Commit, text)
-
- def test_check_order(self):
- lines = self.make_commit_lines(parents=[a_sha], encoding=b"UTF-8")
- headers = lines[:5]
- rest = lines[5:]
- # of all possible permutations, ensure only the original succeeds
- for perm in permutations(headers):
- perm = list(perm)
- text = b"\n".join(perm + rest)
- if perm == headers:
- self.assertCheckSucceeds(Commit, text)
- else:
- self.assertCheckFails(Commit, text)
-
- def test_check_commit_with_unparseable_time(self):
- identity_with_wrong_time = (
- b"Igor Sysoev <igor@sysoev.ru> 18446743887488505614+42707004"
- )
-
- # Those fail at reading time
- self.assertCheckFails(
- Commit,
- self.make_commit_text(
- author=default_committer, committer=identity_with_wrong_time
- ),
- )
- self.assertCheckFails(
- Commit,
- self.make_commit_text(
- author=identity_with_wrong_time, committer=default_committer
- ),
- )
-
- def test_check_commit_with_overflow_date(self):
- """Date with overflow should raise an ObjectFormatException when checked."""
- identity_with_wrong_time = (
- b"Igor Sysoev <igor@sysoev.ru> 18446743887488505614 +42707004"
- )
- commit0 = Commit.from_string(
- self.make_commit_text(
- author=identity_with_wrong_time, committer=default_committer
- )
- )
- commit1 = Commit.from_string(
- self.make_commit_text(
- author=default_committer, committer=identity_with_wrong_time
- )
- )
-
- # Those fails when triggering the check() method
- for commit in [commit0, commit1]:
- with self.assertRaises(ObjectFormatException):
- commit.check()
-
- def test_mangled_author_line(self):
- """Mangled author line should successfully parse."""
- author_line = (
- b'Karl MacMillan <kmacmill@redhat.com> <"Karl MacMillan '
- b'<kmacmill@redhat.com>"> 1197475547 -0500'
- )
- expected_identity = (
- b'Karl MacMillan <kmacmill@redhat.com> <"Karl MacMillan '
- b'<kmacmill@redhat.com>">'
- )
- commit = Commit.from_string(self.make_commit_text(author=author_line))
-
- # The commit parses properly
- self.assertEqual(commit.author, expected_identity)
-
- # But the check fails because the author identity is bogus
- with self.assertRaises(ObjectFormatException):
- commit.check()
-
- def test_parse_gpgsig(self):
- c = Commit.from_string(
- b"""tree aaff74984cccd156a469afa7d9ab10e4777beb24
-author Jelmer Vernooij <jelmer@samba.org> 1412179807 +0200
-committer Jelmer Vernooij <jelmer@samba.org> 1412179807 +0200
-gpgsig -----BEGIN PGP SIGNATURE-----
- Version: GnuPG v1
-
- iQIcBAABCgAGBQJULCdfAAoJEACAbyvXKaRXuKwP/RyP9PA49uAvu8tQVCC/uBa8
- vi975+xvO14R8Pp8k2nps7lSxCdtCd+xVT1VRHs0wNhOZo2YCVoU1HATkPejqSeV
- NScTHcxnk4/+bxyfk14xvJkNp7FlQ3npmBkA+lbV0Ubr33rvtIE5jiJPyz+SgWAg
- xdBG2TojV0squj00GoH/euK6aX7GgZtwdtpTv44haCQdSuPGDcI4TORqR6YSqvy3
- GPE+3ZqXPFFb+KILtimkxitdwB7CpwmNse2vE3rONSwTvi8nq3ZoQYNY73CQGkUy
- qoFU0pDtw87U3niFin1ZccDgH0bB6624sLViqrjcbYJeg815Htsu4rmzVaZADEVC
- XhIO4MThebusdk0AcNGjgpf3HRHk0DPMDDlIjm+Oao0cqovvF6VyYmcb0C+RmhJj
- dodLXMNmbqErwTk3zEkW0yZvNIYXH7m9SokPCZa4eeIM7be62X6h1mbt0/IU6Th+
- v18fS0iTMP/Viug5und+05C/v04kgDo0CPphAbXwWMnkE4B6Tl9sdyUYXtvQsL7x
- 0+WP1gL27ANqNZiI07Kz/BhbBAQI/+2TFT7oGr0AnFPQ5jHp+3GpUf6OKuT1wT3H
- ND189UFuRuubxb42vZhpcXRbqJVWnbECTKVUPsGZqat3enQUB63uM4i6/RdONDZA
- fDeF1m4qYs+cUXKNUZ03
- =X6RT
- -----END PGP SIGNATURE-----
-
-foo
-"""
- )
- self.assertEqual(b"foo\n", c.message)
- self.assertEqual([], c._extra)
- self.assertEqual(
- b"""-----BEGIN PGP SIGNATURE-----
-Version: GnuPG v1
-
-iQIcBAABCgAGBQJULCdfAAoJEACAbyvXKaRXuKwP/RyP9PA49uAvu8tQVCC/uBa8
-vi975+xvO14R8Pp8k2nps7lSxCdtCd+xVT1VRHs0wNhOZo2YCVoU1HATkPejqSeV
-NScTHcxnk4/+bxyfk14xvJkNp7FlQ3npmBkA+lbV0Ubr33rvtIE5jiJPyz+SgWAg
-xdBG2TojV0squj00GoH/euK6aX7GgZtwdtpTv44haCQdSuPGDcI4TORqR6YSqvy3
-GPE+3ZqXPFFb+KILtimkxitdwB7CpwmNse2vE3rONSwTvi8nq3ZoQYNY73CQGkUy
-qoFU0pDtw87U3niFin1ZccDgH0bB6624sLViqrjcbYJeg815Htsu4rmzVaZADEVC
-XhIO4MThebusdk0AcNGjgpf3HRHk0DPMDDlIjm+Oao0cqovvF6VyYmcb0C+RmhJj
-dodLXMNmbqErwTk3zEkW0yZvNIYXH7m9SokPCZa4eeIM7be62X6h1mbt0/IU6Th+
-v18fS0iTMP/Viug5und+05C/v04kgDo0CPphAbXwWMnkE4B6Tl9sdyUYXtvQsL7x
-0+WP1gL27ANqNZiI07Kz/BhbBAQI/+2TFT7oGr0AnFPQ5jHp+3GpUf6OKuT1wT3H
-ND189UFuRuubxb42vZhpcXRbqJVWnbECTKVUPsGZqat3enQUB63uM4i6/RdONDZA
-fDeF1m4qYs+cUXKNUZ03
-=X6RT
------END PGP SIGNATURE-----""",
- c.gpgsig,
- )
-
- def test_parse_header_trailing_newline(self):
- c = Commit.from_string(
- b"""\
-tree a7d6277f78d3ecd0230a1a5df6db00b1d9c521ac
-parent c09b6dec7a73760fbdb478383a3c926b18db8bbe
-author Neil Matatall <oreoshake@github.com> 1461964057 -1000
-committer Neil Matatall <oreoshake@github.com> 1461964057 -1000
-gpgsig -----BEGIN PGP SIGNATURE-----
-
- wsBcBAABCAAQBQJXI80ZCRA6pcNDcVZ70gAAarcIABs72xRX3FWeox349nh6ucJK
- CtwmBTusez2Zwmq895fQEbZK7jpaGO5TRO4OvjFxlRo0E08UFx3pxZHSpj6bsFeL
- hHsDXnCaotphLkbgKKRdGZo7tDqM84wuEDlh4MwNe7qlFC7bYLDyysc81ZX5lpMm
- 2MFF1TvjLAzSvkT7H1LPkuR3hSvfCYhikbPOUNnKOo0sYjeJeAJ/JdAVQ4mdJIM0
- gl3REp9+A+qBEpNQI7z94Pg5Bc5xenwuDh3SJgHvJV6zBWupWcdB3fAkVd4TPnEZ
- nHxksHfeNln9RKseIDcy4b2ATjhDNIJZARHNfr6oy4u3XPW4svRqtBsLoMiIeuI=
- =ms6q
- -----END PGP SIGNATURE-----
-
-
-3.3.0 version bump and docs
-"""
- )
- self.assertEqual([], c._extra)
- self.assertEqual(
- b"""\
------BEGIN PGP SIGNATURE-----
-
-wsBcBAABCAAQBQJXI80ZCRA6pcNDcVZ70gAAarcIABs72xRX3FWeox349nh6ucJK
-CtwmBTusez2Zwmq895fQEbZK7jpaGO5TRO4OvjFxlRo0E08UFx3pxZHSpj6bsFeL
-hHsDXnCaotphLkbgKKRdGZo7tDqM84wuEDlh4MwNe7qlFC7bYLDyysc81ZX5lpMm
-2MFF1TvjLAzSvkT7H1LPkuR3hSvfCYhikbPOUNnKOo0sYjeJeAJ/JdAVQ4mdJIM0
-gl3REp9+A+qBEpNQI7z94Pg5Bc5xenwuDh3SJgHvJV6zBWupWcdB3fAkVd4TPnEZ
-nHxksHfeNln9RKseIDcy4b2ATjhDNIJZARHNfr6oy4u3XPW4svRqtBsLoMiIeuI=
-=ms6q
------END PGP SIGNATURE-----\n""",
- c.gpgsig,
- )
-
-
-_TREE_ITEMS = {
- b"a.c": (0o100755, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
- b"a": (stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
- b"a/c": (stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
-}
-
-_SORTED_TREE_ITEMS = [
- TreeEntry(b"a.c", 0o100755, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
- TreeEntry(b"a", stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
- TreeEntry(b"a/c", stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
-]
-
-
-class TreeTests(ShaFileCheckTests):
- def test_add(self):
- myhexsha = b"d80c186a03f423a81b39df39dc87fd269736ca86"
- x = Tree()
- x.add(b"myname", 0o100755, myhexsha)
- self.assertEqual(x[b"myname"], (0o100755, myhexsha))
- self.assertEqual(b"100755 myname\0" + hex_to_sha(myhexsha), x.as_raw_string())
-
- def test_simple(self):
- myhexsha = b"d80c186a03f423a81b39df39dc87fd269736ca86"
- x = Tree()
- x[b"myname"] = (0o100755, myhexsha)
- self.assertEqual(b"100755 myname\0" + hex_to_sha(myhexsha), x.as_raw_string())
- self.assertEqual(b"100755 myname\0" + hex_to_sha(myhexsha), bytes(x))
-
- def test_tree_update_id(self):
- x = Tree()
- x[b"a.c"] = (0o100755, b"d80c186a03f423a81b39df39dc87fd269736ca86")
- self.assertEqual(b"0c5c6bc2c081accfbc250331b19e43b904ab9cdd", x.id)
- x[b"a.b"] = (stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86")
- self.assertEqual(b"07bfcb5f3ada15bbebdfa3bbb8fd858a363925c8", x.id)
-
- def test_tree_iteritems_dir_sort(self):
- x = Tree()
- for name, item in _TREE_ITEMS.items():
- x[name] = item
- self.assertEqual(_SORTED_TREE_ITEMS, x.items())
-
- def test_tree_items_dir_sort(self):
- x = Tree()
- for name, item in _TREE_ITEMS.items():
- x[name] = item
- self.assertEqual(_SORTED_TREE_ITEMS, x.items())
-
- def _do_test_parse_tree(self, parse_tree):
- dir = os.path.join(os.path.dirname(__file__), "..", "..", "testdata", "trees")
- o = Tree.from_path(hex_to_filename(dir, tree_sha))
- self.assertEqual(
- [(b"a", 0o100644, a_sha), (b"b", 0o100644, b_sha)],
- list(parse_tree(o.as_raw_string())),
- )
- # test a broken tree that has a leading 0 on the file mode
- broken_tree = b"0100644 foo\0" + hex_to_sha(a_sha)
-
- def eval_parse_tree(*args, **kwargs):
- return list(parse_tree(*args, **kwargs))
-
- self.assertEqual([(b"foo", 0o100644, a_sha)], eval_parse_tree(broken_tree))
- self.assertRaises(
- ObjectFormatException, eval_parse_tree, broken_tree, strict=True
- )
-
- test_parse_tree = functest_builder(_do_test_parse_tree, _parse_tree_py)
- test_parse_tree_extension = ext_functest_builder(_do_test_parse_tree, parse_tree)
-
- def _do_test_sorted_tree_items(self, sorted_tree_items):
- def do_sort(entries):
- return list(sorted_tree_items(entries, False))
-
- actual = do_sort(_TREE_ITEMS)
- self.assertEqual(_SORTED_TREE_ITEMS, actual)
- self.assertIsInstance(actual[0], TreeEntry)
-
- # C/Python implementations may differ in specific error types, but
- # should all error on invalid inputs.
- # For example, the C implementation has stricter type checks, so may
- # raise TypeError where the Python implementation raises
- # AttributeError.
- errors = (TypeError, ValueError, AttributeError)
- self.assertRaises(errors, do_sort, b"foo")
- self.assertRaises(errors, do_sort, {b"foo": (1, 2, 3)})
-
- myhexsha = b"d80c186a03f423a81b39df39dc87fd269736ca86"
- self.assertRaises(errors, do_sort, {b"foo": (b"xxx", myhexsha)})
- self.assertRaises(errors, do_sort, {b"foo": (0o100755, 12345)})
-
- test_sorted_tree_items = functest_builder(
- _do_test_sorted_tree_items, _sorted_tree_items_py
- )
- test_sorted_tree_items_extension = ext_functest_builder(
- _do_test_sorted_tree_items, sorted_tree_items
- )
-
- def _do_test_sorted_tree_items_name_order(self, sorted_tree_items):
- self.assertEqual(
- [
- TreeEntry(
- b"a",
- stat.S_IFDIR,
- b"d80c186a03f423a81b39df39dc87fd269736ca86",
- ),
- TreeEntry(
- b"a.c",
- 0o100755,
- b"d80c186a03f423a81b39df39dc87fd269736ca86",
- ),
- TreeEntry(
- b"a/c",
- stat.S_IFDIR,
- b"d80c186a03f423a81b39df39dc87fd269736ca86",
- ),
- ],
- list(sorted_tree_items(_TREE_ITEMS, True)),
- )
-
- test_sorted_tree_items_name_order = functest_builder(
- _do_test_sorted_tree_items_name_order, _sorted_tree_items_py
- )
- test_sorted_tree_items_name_order_extension = ext_functest_builder(
- _do_test_sorted_tree_items_name_order, sorted_tree_items
- )
-
- def test_check(self):
- t = Tree
- sha = hex_to_sha(a_sha)
-
- # filenames
- self.assertCheckSucceeds(t, b"100644 .a\0" + sha)
- self.assertCheckFails(t, b"100644 \0" + sha)
- self.assertCheckFails(t, b"100644 .\0" + sha)
- self.assertCheckFails(t, b"100644 a/a\0" + sha)
- self.assertCheckFails(t, b"100644 ..\0" + sha)
- self.assertCheckFails(t, b"100644 .git\0" + sha)
-
- # modes
- self.assertCheckSucceeds(t, b"100644 a\0" + sha)
- self.assertCheckSucceeds(t, b"100755 a\0" + sha)
- self.assertCheckSucceeds(t, b"160000 a\0" + sha)
- # TODO more whitelisted modes
- self.assertCheckFails(t, b"123456 a\0" + sha)
- self.assertCheckFails(t, b"123abc a\0" + sha)
- # should fail check, but parses ok
- self.assertCheckFails(t, b"0100644 foo\0" + sha)
-
- # shas
- self.assertCheckFails(t, b"100644 a\0" + (b"x" * 5))
- self.assertCheckFails(t, b"100644 a\0" + (b"x" * 18) + b"\0")
- self.assertCheckFails(t, b"100644 a\0" + (b"x" * 21) + b"\n100644 b\0" + sha)
-
- # ordering
- sha2 = hex_to_sha(b_sha)
- self.assertCheckSucceeds(t, b"100644 a\0" + sha + b"100644 b\0" + sha)
- self.assertCheckSucceeds(t, b"100644 a\0" + sha + b"100644 b\0" + sha2)
- self.assertCheckFails(t, b"100644 a\0" + sha + b"100755 a\0" + sha2)
- self.assertCheckFails(t, b"100644 b\0" + sha2 + b"100644 a\0" + sha)
-
- def test_iter(self):
- t = Tree()
- t[b"foo"] = (0o100644, a_sha)
- self.assertEqual({b"foo"}, set(t))
-
-
-class TagSerializeTests(TestCase):
- def test_serialize_simple(self):
- x = make_object(
- Tag,
- tagger=b"Jelmer Vernooij <jelmer@samba.org>",
- name=b"0.1",
- message=b"Tag 0.1",
- object=(Blob, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
- tag_time=423423423,
- tag_timezone=0,
- )
- self.assertEqual(
- (
- b"object d80c186a03f423a81b39df39dc87fd269736ca86\n"
- b"type blob\n"
- b"tag 0.1\n"
- b"tagger Jelmer Vernooij <jelmer@samba.org> "
- b"423423423 +0000\n"
- b"\n"
- b"Tag 0.1"
- ),
- x.as_raw_string(),
- )
-
- def test_serialize_none_message(self):
- x = make_object(
- Tag,
- tagger=b"Jelmer Vernooij <jelmer@samba.org>",
- name=b"0.1",
- message=None,
- object=(Blob, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
- tag_time=423423423,
- tag_timezone=0,
- )
- self.assertEqual(
- (
- b"object d80c186a03f423a81b39df39dc87fd269736ca86\n"
- b"type blob\n"
- b"tag 0.1\n"
- b"tagger Jelmer Vernooij <jelmer@samba.org> "
- b"423423423 +0000\n"
- ),
- x.as_raw_string(),
- )
-
-
-default_tagger = (
- b"Linus Torvalds <torvalds@woody.linux-foundation.org> " b"1183319674 -0700"
-)
-default_message = b"""Linux 2.6.22-rc7
------BEGIN PGP SIGNATURE-----
-Version: GnuPG v1.4.7 (GNU/Linux)
-
-iD8DBQBGiAaAF3YsRnbiHLsRAitMAKCiLboJkQECM/jpYsY3WPfvUgLXkACgg3ql
-OK2XeQOiEeXtT76rV4t2WR4=
-=ivrA
------END PGP SIGNATURE-----
-"""
-
-
-class TagParseTests(ShaFileCheckTests):
- def make_tag_lines(
- self,
- object_sha=b"a38d6181ff27824c79fc7df825164a212eff6a3f",
- object_type_name=b"commit",
- name=b"v2.6.22-rc7",
- tagger=default_tagger,
- message=default_message,
- ):
- lines = []
- if object_sha is not None:
- lines.append(b"object " + object_sha)
- if object_type_name is not None:
- lines.append(b"type " + object_type_name)
- if name is not None:
- lines.append(b"tag " + name)
- if tagger is not None:
- lines.append(b"tagger " + tagger)
- if message is not None:
- lines.append(b"")
- lines.append(message)
- return lines
-
- def make_tag_text(self, **kwargs):
- return b"\n".join(self.make_tag_lines(**kwargs))
-
- def test_parse(self):
- x = Tag()
- x.set_raw_string(self.make_tag_text())
- self.assertEqual(
- b"Linus Torvalds <torvalds@woody.linux-foundation.org>", x.tagger
- )
- self.assertEqual(b"v2.6.22-rc7", x.name)
- object_type, object_sha = x.object
- self.assertEqual(b"a38d6181ff27824c79fc7df825164a212eff6a3f", object_sha)
- self.assertEqual(Commit, object_type)
- self.assertEqual(
- datetime.datetime.utcfromtimestamp(x.tag_time),
- datetime.datetime(2007, 7, 1, 19, 54, 34),
- )
- self.assertEqual(-25200, x.tag_timezone)
-
- def test_parse_no_tagger(self):
- x = Tag()
- x.set_raw_string(self.make_tag_text(tagger=None))
- self.assertEqual(None, x.tagger)
- self.assertEqual(b"v2.6.22-rc7", x.name)
- self.assertEqual(None, x.tag_time)
-
- def test_parse_no_message(self):
- x = Tag()
- x.set_raw_string(self.make_tag_text(message=None))
- self.assertEqual(None, x.message)
- self.assertEqual(
- b"Linus Torvalds <torvalds@woody.linux-foundation.org>", x.tagger
- )
- self.assertEqual(
- datetime.datetime.utcfromtimestamp(x.tag_time),
- datetime.datetime(2007, 7, 1, 19, 54, 34),
- )
- self.assertEqual(-25200, x.tag_timezone)
- self.assertEqual(b"v2.6.22-rc7", x.name)
-
- def test_check(self):
- self.assertCheckSucceeds(Tag, self.make_tag_text())
- self.assertCheckFails(Tag, self.make_tag_text(object_sha=None))
- self.assertCheckFails(Tag, self.make_tag_text(object_type_name=None))
- self.assertCheckFails(Tag, self.make_tag_text(name=None))
- self.assertCheckFails(Tag, self.make_tag_text(name=b""))
- self.assertCheckFails(Tag, self.make_tag_text(object_type_name=b"foobar"))
- self.assertCheckFails(
- Tag,
- self.make_tag_text(
- tagger=b"some guy without an email address 1183319674 -0700"
- ),
- )
- self.assertCheckFails(
- Tag,
- self.make_tag_text(
- tagger=(
- b"Linus Torvalds <torvalds@woody.linux-foundation.org> "
- b"Sun 7 Jul 2007 12:54:34 +0700"
- )
- ),
- )
- self.assertCheckFails(Tag, self.make_tag_text(object_sha=b"xxx"))
-
- def test_check_tag_with_unparseable_field(self):
- self.assertCheckFails(
- Tag,
- self.make_tag_text(
- tagger=(
- b"Linus Torvalds <torvalds@woody.linux-foundation.org> "
- b"423423+0000"
- )
- ),
- )
-
- def test_check_tag_with_overflow_time(self):
- """Date with overflow should raise an ObjectFormatException when checked."""
- author = f"Some Dude <some@dude.org> {MAX_TIME + 1} +0000"
- tag = Tag.from_string(self.make_tag_text(tagger=(author.encode())))
- with self.assertRaises(ObjectFormatException):
- tag.check()
-
- def test_check_duplicates(self):
- # duplicate each of the header fields
- for i in range(4):
- lines = self.make_tag_lines()
- lines.insert(i, lines[i])
- self.assertCheckFails(Tag, b"\n".join(lines))
-
- def test_check_order(self):
- lines = self.make_tag_lines()
- headers = lines[:4]
- rest = lines[4:]
- # of all possible permutations, ensure only the original succeeds
- for perm in permutations(headers):
- perm = list(perm)
- text = b"\n".join(perm + rest)
- if perm == headers:
- self.assertCheckSucceeds(Tag, text)
- else:
- self.assertCheckFails(Tag, text)
-
- def test_tree_copy_after_update(self):
- """Check Tree.id is correctly updated when the tree is copied after updated."""
- shas = []
- tree = Tree()
- shas.append(tree.id)
- tree.add(b"data", 0o644, Blob().id)
- copied = tree.copy()
- shas.append(tree.id)
- shas.append(copied.id)
-
- self.assertNotIn(shas[0], shas[1:])
- self.assertEqual(shas[1], shas[2])
-
-
-class CheckTests(TestCase):
- def test_check_hexsha(self):
- check_hexsha(a_sha, "failed to check good sha")
- self.assertRaises(
- ObjectFormatException, check_hexsha, b"1" * 39, "sha too short"
- )
- self.assertRaises(
- ObjectFormatException, check_hexsha, b"1" * 41, "sha too long"
- )
- self.assertRaises(
- ObjectFormatException,
- check_hexsha,
- b"x" * 40,
- "invalid characters",
- )
-
- def test_check_identity(self):
- check_identity(
- b"Dave Borowitz <dborowitz@google.com>",
- "failed to check good identity",
- )
- check_identity(b" <dborowitz@google.com>", "failed to check good identity")
- self.assertRaises(
- ObjectFormatException,
- check_identity,
- b"<dborowitz@google.com>",
- "no space before email",
- )
- self.assertRaises(
- ObjectFormatException, check_identity, b"Dave Borowitz", "no email"
- )
- self.assertRaises(
- ObjectFormatException,
- check_identity,
- b"Dave Borowitz <dborowitz",
- "incomplete email",
- )
- self.assertRaises(
- ObjectFormatException,
- check_identity,
- b"dborowitz@google.com>",
- "incomplete email",
- )
- self.assertRaises(
- ObjectFormatException,
- check_identity,
- b"Dave Borowitz <<dborowitz@google.com>",
- "typo",
- )
- self.assertRaises(
- ObjectFormatException,
- check_identity,
- b"Dave Borowitz <dborowitz@google.com>>",
- "typo",
- )
- self.assertRaises(
- ObjectFormatException,
- check_identity,
- b"Dave Borowitz <dborowitz@google.com>xxx",
- "trailing characters",
- )
- self.assertRaises(
- ObjectFormatException,
- check_identity,
- b"Dave Borowitz <dborowitz@google.com>xxx",
- "trailing characters",
- )
- self.assertRaises(
- ObjectFormatException,
- check_identity,
- b"Dave<Borowitz <dborowitz@google.com>",
- "reserved byte in name",
- )
- self.assertRaises(
- ObjectFormatException,
- check_identity,
- b"Dave>Borowitz <dborowitz@google.com>",
- "reserved byte in name",
- )
- self.assertRaises(
- ObjectFormatException,
- check_identity,
- b"Dave\0Borowitz <dborowitz@google.com>",
- "null byte",
- )
- self.assertRaises(
- ObjectFormatException,
- check_identity,
- b"Dave\nBorowitz <dborowitz@google.com>",
- "newline byte",
- )
-
-
-class TimezoneTests(TestCase):
- def test_parse_timezone_utc(self):
- self.assertEqual((0, False), parse_timezone(b"+0000"))
-
- def test_parse_timezone_utc_negative(self):
- self.assertEqual((0, True), parse_timezone(b"-0000"))
-
- def test_generate_timezone_utc(self):
- self.assertEqual(b"+0000", format_timezone(0))
-
- def test_generate_timezone_utc_negative(self):
- self.assertEqual(b"-0000", format_timezone(0, True))
-
- def test_parse_timezone_cet(self):
- self.assertEqual((60 * 60, False), parse_timezone(b"+0100"))
-
- def test_format_timezone_cet(self):
- self.assertEqual(b"+0100", format_timezone(60 * 60))
-
- def test_format_timezone_pdt(self):
- self.assertEqual(b"-0400", format_timezone(-4 * 60 * 60))
-
- def test_parse_timezone_pdt(self):
- self.assertEqual((-4 * 60 * 60, False), parse_timezone(b"-0400"))
-
- def test_format_timezone_pdt_half(self):
- self.assertEqual(b"-0440", format_timezone(int(((-4 * 60) - 40) * 60)))
-
- def test_format_timezone_double_negative(self):
- self.assertEqual(b"--700", format_timezone(int((7 * 60) * 60), True))
-
- def test_parse_timezone_pdt_half(self):
- self.assertEqual((((-4 * 60) - 40) * 60, False), parse_timezone(b"-0440"))
-
- def test_parse_timezone_double_negative(self):
- self.assertEqual((int((7 * 60) * 60), False), parse_timezone(b"+700"))
- self.assertEqual((int((7 * 60) * 60), True), parse_timezone(b"--700"))
-
-
-class ShaFileCopyTests(TestCase):
- def assert_copy(self, orig):
- oclass = object_class(orig.type_num)
-
- copy = orig.copy()
- self.assertIsInstance(copy, oclass)
- self.assertEqual(copy, orig)
- self.assertIsNot(copy, orig)
-
- def test_commit_copy(self):
- attrs = {
- "tree": b"d80c186a03f423a81b39df39dc87fd269736ca86",
- "parents": [
- b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd",
- b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6",
- ],
- "author": b"James Westby <jw+debian@jameswestby.net>",
- "committer": b"James Westby <jw+debian@jameswestby.net>",
- "commit_time": 1174773719,
- "author_time": 1174773719,
- "commit_timezone": 0,
- "author_timezone": 0,
- "message": b"Merge ../b\n",
- }
- commit = make_commit(**attrs)
- self.assert_copy(commit)
-
- def test_blob_copy(self):
- blob = make_object(Blob, data=b"i am a blob")
- self.assert_copy(blob)
-
- def test_tree_copy(self):
- blob = make_object(Blob, data=b"i am a blob")
- tree = Tree()
- tree[b"blob"] = (stat.S_IFREG, blob.id)
- self.assert_copy(tree)
-
- def test_tag_copy(self):
- tag = make_object(
- Tag,
- name=b"tag",
- message=b"",
- tagger=b"Tagger <test@example.com>",
- tag_time=12345,
- tag_timezone=0,
- object=(Commit, b"0" * 40),
- )
- self.assert_copy(tag)
-
-
-class ShaFileSerializeTests(TestCase):
- """`ShaFile` objects only gets serialized once if they haven't changed."""
-
- @contextmanager
- def assert_serialization_on_change(
- self, obj, needs_serialization_after_change=True
- ):
- old_id = obj.id
- self.assertFalse(obj._needs_serialization)
-
- yield obj
-
- if needs_serialization_after_change:
- self.assertTrue(obj._needs_serialization)
- else:
- self.assertFalse(obj._needs_serialization)
- new_id = obj.id
- self.assertFalse(obj._needs_serialization)
- self.assertNotEqual(old_id, new_id)
-
- def test_commit_serialize(self):
- attrs = {
- "tree": b"d80c186a03f423a81b39df39dc87fd269736ca86",
- "parents": [
- b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd",
- b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6",
- ],
- "author": b"James Westby <jw+debian@jameswestby.net>",
- "committer": b"James Westby <jw+debian@jameswestby.net>",
- "commit_time": 1174773719,
- "author_time": 1174773719,
- "commit_timezone": 0,
- "author_timezone": 0,
- "message": b"Merge ../b\n",
- }
- commit = make_commit(**attrs)
-
- with self.assert_serialization_on_change(commit):
- commit.parents = [b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd"]
-
- def test_blob_serialize(self):
- blob = make_object(Blob, data=b"i am a blob")
-
- with self.assert_serialization_on_change(
- blob, needs_serialization_after_change=False
- ):
- blob.data = b"i am another blob"
-
- def test_tree_serialize(self):
- blob = make_object(Blob, data=b"i am a blob")
- tree = Tree()
- tree[b"blob"] = (stat.S_IFREG, blob.id)
-
- with self.assert_serialization_on_change(tree):
- tree[b"blob2"] = (stat.S_IFREG, blob.id)
-
- def test_tag_serialize(self):
- tag = make_object(
- Tag,
- name=b"tag",
- message=b"",
- tagger=b"Tagger <test@example.com>",
- tag_time=12345,
- tag_timezone=0,
- object=(Commit, b"0" * 40),
- )
-
- with self.assert_serialization_on_change(tag):
- tag.message = b"new message"
-
- def test_tag_serialize_time_error(self):
- with self.assertRaises(ObjectFormatException):
- tag = make_object(
- Tag,
- name=b"tag",
- message=b"some message",
- tagger=b"Tagger <test@example.com> 1174773719+0000",
- object=(Commit, b"0" * 40),
- )
- tag._deserialize(tag._serialize())
-
-
-class PrettyFormatTreeEntryTests(TestCase):
- def test_format(self):
- self.assertEqual(
- "40000 tree 40820c38cfb182ce6c8b261555410d8382a5918b\tfoo\n",
- pretty_format_tree_entry(
- b"foo", 0o40000, b"40820c38cfb182ce6c8b261555410d8382a5918b"
- ),
- )
blob - 0e3476e557bbff68354ddb77cb071d540bd9c559 (mode 644)
blob + /dev/null
--- dulwich/tests/test_objectspec.py
+++ /dev/null
-# test_objectspec.py -- tests for objectspec.py
-# Copyright (C) 2014 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for revision spec parsing."""
-
-# TODO: Round-trip parse-serialize-parse and serialize-parse-serialize tests.
-
-from dulwich.tests import TestCase
-
-from ..objects import Blob
-from ..objectspec import (
- parse_commit,
- parse_commit_range,
- parse_object,
- parse_ref,
- parse_refs,
- parse_reftuple,
- parse_reftuples,
- parse_tree,
-)
-from ..repo import MemoryRepo
-from .utils import build_commit_graph
-
-
-class ParseObjectTests(TestCase):
- """Test parse_object."""
-
- def test_nonexistent(self):
- r = MemoryRepo()
- self.assertRaises(KeyError, parse_object, r, "thisdoesnotexist")
-
- def test_blob_by_sha(self):
- r = MemoryRepo()
- b = Blob.from_string(b"Blah")
- r.object_store.add_object(b)
- self.assertEqual(b, parse_object(r, b.id))
-
-
-class ParseCommitRangeTests(TestCase):
- """Test parse_commit_range."""
-
- def test_nonexistent(self):
- r = MemoryRepo()
- self.assertRaises(KeyError, parse_commit_range, r, "thisdoesnotexist")
-
- def test_commit_by_sha(self):
- r = MemoryRepo()
- c1, c2, c3 = build_commit_graph(r.object_store, [[1], [2, 1], [3, 1, 2]])
- self.assertEqual([c1], list(parse_commit_range(r, c1.id)))
-
-
-class ParseCommitTests(TestCase):
- """Test parse_commit."""
-
- def test_nonexistent(self):
- r = MemoryRepo()
- self.assertRaises(KeyError, parse_commit, r, "thisdoesnotexist")
-
- def test_commit_by_sha(self):
- r = MemoryRepo()
- [c1] = build_commit_graph(r.object_store, [[1]])
- self.assertEqual(c1, parse_commit(r, c1.id))
-
- def test_commit_by_short_sha(self):
- r = MemoryRepo()
- [c1] = build_commit_graph(r.object_store, [[1]])
- self.assertEqual(c1, parse_commit(r, c1.id[:10]))
-
-
-class ParseRefTests(TestCase):
- def test_nonexistent(self):
- r = {}
- self.assertRaises(KeyError, parse_ref, r, b"thisdoesnotexist")
-
- def test_ambiguous_ref(self):
- r = {
- b"ambig1": "bla",
- b"refs/ambig1": "bla",
- b"refs/tags/ambig1": "bla",
- b"refs/heads/ambig1": "bla",
- b"refs/remotes/ambig1": "bla",
- b"refs/remotes/ambig1/HEAD": "bla",
- }
- self.assertEqual(b"ambig1", parse_ref(r, b"ambig1"))
-
- def test_ambiguous_ref2(self):
- r = {
- b"refs/ambig2": "bla",
- b"refs/tags/ambig2": "bla",
- b"refs/heads/ambig2": "bla",
- b"refs/remotes/ambig2": "bla",
- b"refs/remotes/ambig2/HEAD": "bla",
- }
- self.assertEqual(b"refs/ambig2", parse_ref(r, b"ambig2"))
-
- def test_ambiguous_tag(self):
- r = {
- b"refs/tags/ambig3": "bla",
- b"refs/heads/ambig3": "bla",
- b"refs/remotes/ambig3": "bla",
- b"refs/remotes/ambig3/HEAD": "bla",
- }
- self.assertEqual(b"refs/tags/ambig3", parse_ref(r, b"ambig3"))
-
- def test_ambiguous_head(self):
- r = {
- b"refs/heads/ambig4": "bla",
- b"refs/remotes/ambig4": "bla",
- b"refs/remotes/ambig4/HEAD": "bla",
- }
- self.assertEqual(b"refs/heads/ambig4", parse_ref(r, b"ambig4"))
-
- def test_ambiguous_remote(self):
- r = {b"refs/remotes/ambig5": "bla", b"refs/remotes/ambig5/HEAD": "bla"}
- self.assertEqual(b"refs/remotes/ambig5", parse_ref(r, b"ambig5"))
-
- def test_ambiguous_remote_head(self):
- r = {b"refs/remotes/ambig6/HEAD": "bla"}
- self.assertEqual(b"refs/remotes/ambig6/HEAD", parse_ref(r, b"ambig6"))
-
- def test_heads_full(self):
- r = {b"refs/heads/foo": "bla"}
- self.assertEqual(b"refs/heads/foo", parse_ref(r, b"refs/heads/foo"))
-
- def test_heads_partial(self):
- r = {b"refs/heads/foo": "bla"}
- self.assertEqual(b"refs/heads/foo", parse_ref(r, b"heads/foo"))
-
- def test_tags_partial(self):
- r = {b"refs/tags/foo": "bla"}
- self.assertEqual(b"refs/tags/foo", parse_ref(r, b"tags/foo"))
-
-
-class ParseRefsTests(TestCase):
- def test_nonexistent(self):
- r = {}
- self.assertRaises(KeyError, parse_refs, r, [b"thisdoesnotexist"])
-
- def test_head(self):
- r = {b"refs/heads/foo": "bla"}
- self.assertEqual([b"refs/heads/foo"], parse_refs(r, [b"foo"]))
-
- def test_full(self):
- r = {b"refs/heads/foo": "bla"}
- self.assertEqual([b"refs/heads/foo"], parse_refs(r, b"refs/heads/foo"))
-
-
-class ParseReftupleTests(TestCase):
- def test_nonexistent(self):
- r = {}
- self.assertRaises(KeyError, parse_reftuple, r, r, b"thisdoesnotexist")
-
- def test_head(self):
- r = {b"refs/heads/foo": "bla"}
- self.assertEqual(
- (b"refs/heads/foo", b"refs/heads/foo", False),
- parse_reftuple(r, r, b"foo"),
- )
- self.assertEqual(
- (b"refs/heads/foo", b"refs/heads/foo", True),
- parse_reftuple(r, r, b"+foo"),
- )
- self.assertEqual(
- (b"refs/heads/foo", b"refs/heads/foo", True),
- parse_reftuple(r, {}, b"+foo"),
- )
- self.assertEqual(
- (b"refs/heads/foo", b"refs/heads/foo", True),
- parse_reftuple(r, {}, b"foo", True),
- )
-
- def test_full(self):
- r = {b"refs/heads/foo": "bla"}
- self.assertEqual(
- (b"refs/heads/foo", b"refs/heads/foo", False),
- parse_reftuple(r, r, b"refs/heads/foo"),
- )
-
- def test_no_left_ref(self):
- r = {b"refs/heads/foo": "bla"}
- self.assertEqual(
- (None, b"refs/heads/foo", False),
- parse_reftuple(r, r, b":refs/heads/foo"),
- )
-
- def test_no_right_ref(self):
- r = {b"refs/heads/foo": "bla"}
- self.assertEqual(
- (b"refs/heads/foo", None, False),
- parse_reftuple(r, r, b"refs/heads/foo:"),
- )
-
- def test_default_with_string(self):
- r = {b"refs/heads/foo": "bla"}
- self.assertEqual(
- (b"refs/heads/foo", b"refs/heads/foo", False),
- parse_reftuple(r, r, "foo"),
- )
-
-
-class ParseReftuplesTests(TestCase):
- def test_nonexistent(self):
- r = {}
- self.assertRaises(KeyError, parse_reftuples, r, r, [b"thisdoesnotexist"])
-
- def test_head(self):
- r = {b"refs/heads/foo": "bla"}
- self.assertEqual(
- [(b"refs/heads/foo", b"refs/heads/foo", False)],
- parse_reftuples(r, r, [b"foo"]),
- )
-
- def test_full(self):
- r = {b"refs/heads/foo": "bla"}
- self.assertEqual(
- [(b"refs/heads/foo", b"refs/heads/foo", False)],
- parse_reftuples(r, r, b"refs/heads/foo"),
- )
- r = {b"refs/heads/foo": "bla"}
- self.assertEqual(
- [(b"refs/heads/foo", b"refs/heads/foo", True)],
- parse_reftuples(r, r, b"refs/heads/foo", True),
- )
-
-
-class ParseTreeTests(TestCase):
- """Test parse_tree."""
-
- def test_nonexistent(self):
- r = MemoryRepo()
- self.assertRaises(KeyError, parse_tree, r, "thisdoesnotexist")
-
- def test_from_commit(self):
- r = MemoryRepo()
- c1, c2, c3 = build_commit_graph(r.object_store, [[1], [2, 1], [3, 1, 2]])
- self.assertEqual(r[c1.tree], parse_tree(r, c1.id))
- self.assertEqual(r[c1.tree], parse_tree(r, c1.tree))
-
- def test_from_ref(self):
- r = MemoryRepo()
- c1, c2, c3 = build_commit_graph(r.object_store, [[1], [2, 1], [3, 1, 2]])
- r.refs[b"refs/heads/foo"] = c1.id
- self.assertEqual(r[c1.tree], parse_tree(r, b"foo"))
blob - d8fa4c1e33e05a332ce31bb5372fd8a10fa317e8 (mode 644)
blob + /dev/null
--- dulwich/tests/test_pack.py
+++ /dev/null
-# test_pack.py -- Tests for the handling of git packs.
-# Copyright (C) 2007 James Westby <jw+debian@jameswestby.net>
-# Copyright (C) 2008 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for Dulwich packs."""
-
-import os
-import shutil
-import sys
-import tempfile
-import zlib
-from hashlib import sha1
-from io import BytesIO
-from typing import Set
-
-from dulwich.tests import TestCase
-
-from ..errors import ApplyDeltaError, ChecksumMismatch
-from ..file import GitFile
-from ..object_store import MemoryObjectStore
-from ..objects import Blob, Commit, Tree, hex_to_sha, sha_to_hex
-from ..pack import (
- OFS_DELTA,
- REF_DELTA,
- DeltaChainIterator,
- MemoryPackIndex,
- Pack,
- PackData,
- PackStreamReader,
- UnpackedObject,
- UnresolvedDeltas,
- _delta_encode_size,
- _encode_copy_operation,
- apply_delta,
- compute_file_sha,
- create_delta,
- deltify_pack_objects,
- load_pack_index,
- read_zlib_chunks,
- unpack_object,
- write_pack,
- write_pack_header,
- write_pack_index_v1,
- write_pack_index_v2,
- write_pack_object,
-)
-from .utils import build_pack, make_object
-
-pack1_sha = b"bc63ddad95e7321ee734ea11a7a62d314e0d7481"
-
-a_sha = b"6f670c0fb53f9463760b7295fbb814e965fb20c8"
-tree_sha = b"b2a2766a2879c209ab1176e7e778b81ae422eeaa"
-commit_sha = b"f18faa16531ac570a3fdc8c7ca16682548dafd12"
-indexmode = "0o100644" if sys.platform != "win32" else "0o100666"
-
-
-class PackTests(TestCase):
- """Base class for testing packs."""
-
- def setUp(self):
- super().setUp()
- self.tempdir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.tempdir)
-
- datadir = os.path.abspath(
- os.path.join(os.path.dirname(__file__), "../../testdata/packs")
- )
-
- def get_pack_index(self, sha):
- """Returns a PackIndex from the datadir with the given sha."""
- return load_pack_index(
- os.path.join(self.datadir, "pack-%s.idx" % sha.decode("ascii"))
- )
-
- def get_pack_data(self, sha):
- """Returns a PackData object from the datadir with the given sha."""
- return PackData(
- os.path.join(self.datadir, "pack-%s.pack" % sha.decode("ascii"))
- )
-
- def get_pack(self, sha):
- return Pack(os.path.join(self.datadir, "pack-%s" % sha.decode("ascii")))
-
- def assertSucceeds(self, func, *args, **kwargs):
- try:
- func(*args, **kwargs)
- except ChecksumMismatch as e:
- self.fail(e)
-
-
-class PackIndexTests(PackTests):
- """Class that tests the index of packfiles."""
-
- def test_object_offset(self):
- """Tests that the correct object offset is returned from the index."""
- p = self.get_pack_index(pack1_sha)
- self.assertRaises(KeyError, p.object_offset, pack1_sha)
- self.assertEqual(p.object_offset(a_sha), 178)
- self.assertEqual(p.object_offset(tree_sha), 138)
- self.assertEqual(p.object_offset(commit_sha), 12)
-
- def test_object_sha1(self):
- """Tests that the correct object offset is returned from the index."""
- p = self.get_pack_index(pack1_sha)
- self.assertRaises(KeyError, p.object_sha1, 876)
- self.assertEqual(p.object_sha1(178), hex_to_sha(a_sha))
- self.assertEqual(p.object_sha1(138), hex_to_sha(tree_sha))
- self.assertEqual(p.object_sha1(12), hex_to_sha(commit_sha))
-
- def test_index_len(self):
- p = self.get_pack_index(pack1_sha)
- self.assertEqual(3, len(p))
-
- def test_get_stored_checksum(self):
- p = self.get_pack_index(pack1_sha)
- self.assertEqual(
- b"f2848e2ad16f329ae1c92e3b95e91888daa5bd01",
- sha_to_hex(p.get_stored_checksum()),
- )
- self.assertEqual(
- b"721980e866af9a5f93ad674144e1459b8ba3e7b7",
- sha_to_hex(p.get_pack_checksum()),
- )
-
- def test_index_check(self):
- p = self.get_pack_index(pack1_sha)
- self.assertSucceeds(p.check)
-
- def test_iterentries(self):
- p = self.get_pack_index(pack1_sha)
- entries = [(sha_to_hex(s), o, c) for s, o, c in p.iterentries()]
- self.assertEqual(
- [
- (b"6f670c0fb53f9463760b7295fbb814e965fb20c8", 178, None),
- (b"b2a2766a2879c209ab1176e7e778b81ae422eeaa", 138, None),
- (b"f18faa16531ac570a3fdc8c7ca16682548dafd12", 12, None),
- ],
- entries,
- )
-
- def test_iter(self):
- p = self.get_pack_index(pack1_sha)
- self.assertEqual({tree_sha, commit_sha, a_sha}, set(p))
-
-
-class TestPackDeltas(TestCase):
- test_string1 = b"The answer was flailing in the wind"
- test_string2 = b"The answer was falling down the pipe"
- test_string3 = b"zzzzz"
-
- test_string_empty = b""
- test_string_big = b"Z" * 8192
- test_string_huge = b"Z" * 100000
-
- def _test_roundtrip(self, base, target):
- self.assertEqual(
- target, b"".join(apply_delta(base, list(create_delta(base, target))))
- )
-
- def test_nochange(self):
- self._test_roundtrip(self.test_string1, self.test_string1)
-
- def test_nochange_huge(self):
- self._test_roundtrip(self.test_string_huge, self.test_string_huge)
-
- def test_change(self):
- self._test_roundtrip(self.test_string1, self.test_string2)
-
- def test_rewrite(self):
- self._test_roundtrip(self.test_string1, self.test_string3)
-
- def test_empty_to_big(self):
- self._test_roundtrip(self.test_string_empty, self.test_string_big)
-
- def test_empty_to_huge(self):
- self._test_roundtrip(self.test_string_empty, self.test_string_huge)
-
- def test_huge_copy(self):
- self._test_roundtrip(
- self.test_string_huge + self.test_string1,
- self.test_string_huge + self.test_string2,
- )
-
- def test_dest_overflow(self):
- self.assertRaises(
- ApplyDeltaError,
- apply_delta,
- b"a" * 0x10000,
- b"\x80\x80\x04\x80\x80\x04\x80" + b"a" * 0x10000,
- )
- self.assertRaises(
- ApplyDeltaError, apply_delta, b"", b"\x00\x80\x02\xb0\x11\x11"
- )
-
- def test_pypy_issue(self):
- # Test for https://github.com/jelmer/dulwich/issues/509 /
- # https://bitbucket.org/pypy/pypy/issues/2499/cpyext-pystring_asstring-doesnt-work
- chunks = [
- b"tree 03207ccf58880a748188836155ceed72f03d65d6\n"
- b"parent 408fbab530fd4abe49249a636a10f10f44d07a21\n"
- b"author Victor Stinner <victor.stinner@gmail.com> "
- b"1421355207 +0100\n"
- b"committer Victor Stinner <victor.stinner@gmail.com> "
- b"1421355207 +0100\n"
- b"\n"
- b"Backout changeset 3a06020af8cf\n"
- b"\nStreamWriter: close() now clears the reference to the "
- b"transport\n"
- b"\nStreamWriter now raises an exception if it is closed: "
- b"write(), writelines(),\n"
- b"write_eof(), can_write_eof(), get_extra_info(), drain().\n"
- ]
- delta = [
- b"\xcd\x03\xad\x03]tree ff3c181a393d5a7270cddc01ea863818a8621ca8\n"
- b"parent 20a103cc90135494162e819f98d0edfc1f1fba6b\x91]7\x0510738"
- b"\x91\x99@\x0b10738 +0100\x93\x04\x01\xc9"
- ]
- res = apply_delta(chunks, delta)
- expected = [
- b"tree ff3c181a393d5a7270cddc01ea863818a8621ca8\n"
- b"parent 20a103cc90135494162e819f98d0edfc1f1fba6b",
- b"\nauthor Victor Stinner <victor.stinner@gmail.com> 14213",
- b"10738",
- b" +0100\ncommitter Victor Stinner <victor.stinner@gmail.com> " b"14213",
- b"10738 +0100",
- b"\n\nStreamWriter: close() now clears the reference to the "
- b"transport\n\n"
- b"StreamWriter now raises an exception if it is closed: "
- b"write(), writelines(),\n"
- b"write_eof(), can_write_eof(), get_extra_info(), drain().\n",
- ]
- self.assertEqual(b"".join(expected), b"".join(res))
-
-
-class TestPackData(PackTests):
- """Tests getting the data from the packfile."""
-
- def test_create_pack(self):
- self.get_pack_data(pack1_sha).close()
-
- def test_from_file(self):
- path = os.path.join(self.datadir, "pack-%s.pack" % pack1_sha.decode("ascii"))
- with open(path, "rb") as f:
- PackData.from_file(f, os.path.getsize(path))
-
- def test_pack_len(self):
- with self.get_pack_data(pack1_sha) as p:
- self.assertEqual(3, len(p))
-
- def test_index_check(self):
- with self.get_pack_data(pack1_sha) as p:
- self.assertSucceeds(p.check)
-
- def test_iter_unpacked(self):
- with self.get_pack_data(pack1_sha) as p:
- commit_data = (
- b"tree b2a2766a2879c209ab1176e7e778b81ae422eeaa\n"
- b"author James Westby <jw+debian@jameswestby.net> "
- b"1174945067 +0100\n"
- b"committer James Westby <jw+debian@jameswestby.net> "
- b"1174945067 +0100\n"
- b"\n"
- b"Test commit\n"
- )
- blob_sha = b"6f670c0fb53f9463760b7295fbb814e965fb20c8"
- tree_data = b"100644 a\0" + hex_to_sha(blob_sha)
- actual = list(p.iter_unpacked())
- self.assertEqual(
- [
- UnpackedObject(
- offset=12,
- pack_type_num=1,
- decomp_chunks=[commit_data],
- crc32=None,
- ),
- UnpackedObject(
- offset=138,
- pack_type_num=2,
- decomp_chunks=[tree_data],
- crc32=None,
- ),
- UnpackedObject(
- offset=178,
- pack_type_num=3,
- decomp_chunks=[b"test 1\n"],
- crc32=None,
- ),
- ],
- actual,
- )
-
- def test_iterentries(self):
- with self.get_pack_data(pack1_sha) as p:
- entries = {(sha_to_hex(s), o, c) for s, o, c in p.iterentries()}
- self.assertEqual(
- {
- (
- b"6f670c0fb53f9463760b7295fbb814e965fb20c8",
- 178,
- 1373561701,
- ),
- (
- b"b2a2766a2879c209ab1176e7e778b81ae422eeaa",
- 138,
- 912998690,
- ),
- (
- b"f18faa16531ac570a3fdc8c7ca16682548dafd12",
- 12,
- 3775879613,
- ),
- },
- entries,
- )
-
- def test_create_index_v1(self):
- with self.get_pack_data(pack1_sha) as p:
- filename = os.path.join(self.tempdir, "v1test.idx")
- p.create_index_v1(filename)
- idx1 = load_pack_index(filename)
- idx2 = self.get_pack_index(pack1_sha)
- self.assertEqual(oct(os.stat(filename).st_mode), indexmode)
- self.assertEqual(idx1, idx2)
-
- def test_create_index_v2(self):
- with self.get_pack_data(pack1_sha) as p:
- filename = os.path.join(self.tempdir, "v2test.idx")
- p.create_index_v2(filename)
- idx1 = load_pack_index(filename)
- idx2 = self.get_pack_index(pack1_sha)
- self.assertEqual(oct(os.stat(filename).st_mode), indexmode)
- self.assertEqual(idx1, idx2)
-
- def test_compute_file_sha(self):
- f = BytesIO(b"abcd1234wxyz")
- self.assertEqual(
- sha1(b"abcd1234wxyz").hexdigest(), compute_file_sha(f).hexdigest()
- )
- self.assertEqual(
- sha1(b"abcd1234wxyz").hexdigest(),
- compute_file_sha(f, buffer_size=5).hexdigest(),
- )
- self.assertEqual(
- sha1(b"abcd1234").hexdigest(),
- compute_file_sha(f, end_ofs=-4).hexdigest(),
- )
- self.assertEqual(
- sha1(b"1234wxyz").hexdigest(),
- compute_file_sha(f, start_ofs=4).hexdigest(),
- )
- self.assertEqual(
- sha1(b"1234").hexdigest(),
- compute_file_sha(f, start_ofs=4, end_ofs=-4).hexdigest(),
- )
-
- def test_compute_file_sha_short_file(self):
- f = BytesIO(b"abcd1234wxyz")
- self.assertRaises(AssertionError, compute_file_sha, f, end_ofs=-20)
- self.assertRaises(AssertionError, compute_file_sha, f, end_ofs=20)
- self.assertRaises(
- AssertionError, compute_file_sha, f, start_ofs=10, end_ofs=-12
- )
-
-
-class TestPack(PackTests):
- def test_len(self):
- with self.get_pack(pack1_sha) as p:
- self.assertEqual(3, len(p))
-
- def test_contains(self):
- with self.get_pack(pack1_sha) as p:
- self.assertIn(tree_sha, p)
-
- def test_get(self):
- with self.get_pack(pack1_sha) as p:
- self.assertEqual(type(p[tree_sha]), Tree)
-
- def test_iter(self):
- with self.get_pack(pack1_sha) as p:
- self.assertEqual({tree_sha, commit_sha, a_sha}, set(p))
-
- def test_iterobjects(self):
- with self.get_pack(pack1_sha) as p:
- expected = {p[s] for s in [commit_sha, tree_sha, a_sha]}
- self.assertEqual(expected, set(list(p.iterobjects())))
-
- def test_pack_tuples(self):
- with self.get_pack(pack1_sha) as p:
- tuples = p.pack_tuples()
- expected = {(p[s], None) for s in [commit_sha, tree_sha, a_sha]}
- self.assertEqual(expected, set(list(tuples)))
- self.assertEqual(expected, set(list(tuples)))
- self.assertEqual(3, len(tuples))
-
- def test_get_object_at(self):
- """Tests random access for non-delta objects."""
- with self.get_pack(pack1_sha) as p:
- obj = p[a_sha]
- self.assertEqual(obj.type_name, b"blob")
- self.assertEqual(obj.sha().hexdigest().encode("ascii"), a_sha)
- obj = p[tree_sha]
- self.assertEqual(obj.type_name, b"tree")
- self.assertEqual(obj.sha().hexdigest().encode("ascii"), tree_sha)
- obj = p[commit_sha]
- self.assertEqual(obj.type_name, b"commit")
- self.assertEqual(obj.sha().hexdigest().encode("ascii"), commit_sha)
-
- def test_copy(self):
- with self.get_pack(pack1_sha) as origpack:
- self.assertSucceeds(origpack.index.check)
- basename = os.path.join(self.tempdir, "Elch")
- write_pack(basename, origpack.pack_tuples())
-
- with Pack(basename) as newpack:
- self.assertEqual(origpack, newpack)
- self.assertSucceeds(newpack.index.check)
- self.assertEqual(origpack.name(), newpack.name())
- self.assertEqual(
- origpack.index.get_pack_checksum(),
- newpack.index.get_pack_checksum(),
- )
-
- wrong_version = origpack.index.version != newpack.index.version
- orig_checksum = origpack.index.get_stored_checksum()
- new_checksum = newpack.index.get_stored_checksum()
- self.assertTrue(wrong_version or orig_checksum == new_checksum)
-
- def test_commit_obj(self):
- with self.get_pack(pack1_sha) as p:
- commit = p[commit_sha]
- self.assertEqual(b"James Westby <jw+debian@jameswestby.net>", commit.author)
- self.assertEqual([], commit.parents)
-
- def _copy_pack(self, origpack):
- basename = os.path.join(self.tempdir, "somepack")
- write_pack(basename, origpack.pack_tuples())
- return Pack(basename)
-
- def test_keep_no_message(self):
- with self.get_pack(pack1_sha) as p:
- p = self._copy_pack(p)
-
- with p:
- keepfile_name = p.keep()
-
- # file should exist
- self.assertTrue(os.path.exists(keepfile_name))
-
- with open(keepfile_name) as f:
- buf = f.read()
- self.assertEqual("", buf)
-
- def test_keep_message(self):
- with self.get_pack(pack1_sha) as p:
- p = self._copy_pack(p)
-
- msg = b"some message"
- with p:
- keepfile_name = p.keep(msg)
-
- # file should exist
- self.assertTrue(os.path.exists(keepfile_name))
-
- # and contain the right message, with a linefeed
- with open(keepfile_name, "rb") as f:
- buf = f.read()
- self.assertEqual(msg + b"\n", buf)
-
- def test_name(self):
- with self.get_pack(pack1_sha) as p:
- self.assertEqual(pack1_sha, p.name())
-
- def test_length_mismatch(self):
- with self.get_pack_data(pack1_sha) as data:
- index = self.get_pack_index(pack1_sha)
- Pack.from_objects(data, index).check_length_and_checksum()
-
- data._file.seek(12)
- bad_file = BytesIO()
- write_pack_header(bad_file.write, 9999)
- bad_file.write(data._file.read())
- bad_file = BytesIO(bad_file.getvalue())
- bad_data = PackData("", file=bad_file)
- bad_pack = Pack.from_lazy_objects(lambda: bad_data, lambda: index)
- self.assertRaises(AssertionError, lambda: bad_pack.data)
- self.assertRaises(AssertionError, bad_pack.check_length_and_checksum)
-
- def test_checksum_mismatch(self):
- with self.get_pack_data(pack1_sha) as data:
- index = self.get_pack_index(pack1_sha)
- Pack.from_objects(data, index).check_length_and_checksum()
-
- data._file.seek(0)
- bad_file = BytesIO(data._file.read()[:-20] + (b"\xff" * 20))
- bad_data = PackData("", file=bad_file)
- bad_pack = Pack.from_lazy_objects(lambda: bad_data, lambda: index)
- self.assertRaises(ChecksumMismatch, lambda: bad_pack.data)
- self.assertRaises(ChecksumMismatch, bad_pack.check_length_and_checksum)
-
- def test_iterobjects_2(self):
- with self.get_pack(pack1_sha) as p:
- objs = {o.id: o for o in p.iterobjects()}
- self.assertEqual(3, len(objs))
- self.assertEqual(sorted(objs), sorted(p.index))
- self.assertIsInstance(objs[a_sha], Blob)
- self.assertIsInstance(objs[tree_sha], Tree)
- self.assertIsInstance(objs[commit_sha], Commit)
-
- def test_iterobjects_subset(self):
- with self.get_pack(pack1_sha) as p:
- objs = {o.id: o for o in p.iterobjects_subset([commit_sha])}
- self.assertEqual(1, len(objs))
- self.assertIsInstance(objs[commit_sha], Commit)
-
-
-class TestThinPack(PackTests):
- def setUp(self):
- super().setUp()
- self.store = MemoryObjectStore()
- self.blobs = {}
- for blob in (b"foo", b"bar", b"foo1234", b"bar2468"):
- self.blobs[blob] = make_object(Blob, data=blob)
- self.store.add_object(self.blobs[b"foo"])
- self.store.add_object(self.blobs[b"bar"])
-
- # Build a thin pack. 'foo' is as an external reference, 'bar' an
- # internal reference.
- self.pack_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.pack_dir)
- self.pack_prefix = os.path.join(self.pack_dir, "pack")
-
- with open(self.pack_prefix + ".pack", "wb") as f:
- build_pack(
- f,
- [
- (REF_DELTA, (self.blobs[b"foo"].id, b"foo1234")),
- (Blob.type_num, b"bar"),
- (REF_DELTA, (self.blobs[b"bar"].id, b"bar2468")),
- ],
- store=self.store,
- )
-
- # Index the new pack.
- with self.make_pack(True) as pack:
- with PackData(pack._data_path) as data:
- data.create_index(
- self.pack_prefix + ".idx", resolve_ext_ref=pack.resolve_ext_ref
- )
-
- del self.store[self.blobs[b"bar"].id]
-
- def make_pack(self, resolve_ext_ref):
- return Pack(
- self.pack_prefix,
- resolve_ext_ref=self.store.get_raw if resolve_ext_ref else None,
- )
-
- def test_get_raw(self):
- with self.make_pack(False) as p:
- self.assertRaises(KeyError, p.get_raw, self.blobs[b"foo1234"].id)
- with self.make_pack(True) as p:
- self.assertEqual((3, b"foo1234"), p.get_raw(self.blobs[b"foo1234"].id))
-
- def test_get_unpacked_object(self):
- self.maxDiff = None
- with self.make_pack(False) as p:
- expected = UnpackedObject(
- 7,
- delta_base=b"\x19\x10(\x15f=#\xf8\xb7ZG\xe7\xa0\x19e\xdc\xdc\x96F\x8c",
- decomp_chunks=[b"\x03\x07\x90\x03\x041234"],
- )
- expected.offset = 12
- got = p.get_unpacked_object(self.blobs[b"foo1234"].id)
- self.assertEqual(expected, got)
- with self.make_pack(True) as p:
- expected = UnpackedObject(
- 7,
- delta_base=b"\x19\x10(\x15f=#\xf8\xb7ZG\xe7\xa0\x19e\xdc\xdc\x96F\x8c",
- decomp_chunks=[b"\x03\x07\x90\x03\x041234"],
- )
- expected.offset = 12
- got = p.get_unpacked_object(self.blobs[b"foo1234"].id)
- self.assertEqual(
- expected,
- got,
- )
-
- def test_iterobjects(self):
- with self.make_pack(False) as p:
- self.assertRaises(UnresolvedDeltas, list, p.iterobjects())
- with self.make_pack(True) as p:
- self.assertEqual(
- sorted(
- [
- self.blobs[b"foo1234"].id,
- self.blobs[b"bar"].id,
- self.blobs[b"bar2468"].id,
- ]
- ),
- sorted(o.id for o in p.iterobjects()),
- )
-
-
-class WritePackTests(TestCase):
- def test_write_pack_header(self):
- f = BytesIO()
- write_pack_header(f.write, 42)
- self.assertEqual(b"PACK\x00\x00\x00\x02\x00\x00\x00*", f.getvalue())
-
- def test_write_pack_object(self):
- f = BytesIO()
- f.write(b"header")
- offset = f.tell()
- crc32 = write_pack_object(f.write, Blob.type_num, b"blob")
- self.assertEqual(crc32, zlib.crc32(f.getvalue()[6:]) & 0xFFFFFFFF)
-
- f.write(b"x") # unpack_object needs extra trailing data.
- f.seek(offset)
- unpacked, unused = unpack_object(f.read, compute_crc32=True)
- self.assertEqual(Blob.type_num, unpacked.pack_type_num)
- self.assertEqual(Blob.type_num, unpacked.obj_type_num)
- self.assertEqual([b"blob"], unpacked.decomp_chunks)
- self.assertEqual(crc32, unpacked.crc32)
- self.assertEqual(b"x", unused)
-
- def test_write_pack_object_sha(self):
- f = BytesIO()
- f.write(b"header")
- offset = f.tell()
- sha_a = sha1(b"foo")
- sha_b = sha_a.copy()
- write_pack_object(f.write, Blob.type_num, b"blob", sha=sha_a)
- self.assertNotEqual(sha_a.digest(), sha_b.digest())
- sha_b.update(f.getvalue()[offset:])
- self.assertEqual(sha_a.digest(), sha_b.digest())
-
- def test_write_pack_object_compression_level(self):
- f = BytesIO()
- f.write(b"header")
- offset = f.tell()
- sha_a = sha1(b"foo")
- sha_b = sha_a.copy()
- write_pack_object(
- f.write, Blob.type_num, b"blob", sha=sha_a, compression_level=6
- )
- self.assertNotEqual(sha_a.digest(), sha_b.digest())
- sha_b.update(f.getvalue()[offset:])
- self.assertEqual(sha_a.digest(), sha_b.digest())
-
-
-pack_checksum = hex_to_sha("721980e866af9a5f93ad674144e1459b8ba3e7b7")
-
-
-class BaseTestPackIndexWriting:
- def assertSucceeds(self, func, *args, **kwargs):
- try:
- func(*args, **kwargs)
- except ChecksumMismatch as e:
- self.fail(e)
-
- def index(self, filename, entries, pack_checksum):
- raise NotImplementedError(self.index)
-
- def test_empty(self):
- idx = self.index("empty.idx", [], pack_checksum)
- self.assertEqual(idx.get_pack_checksum(), pack_checksum)
- self.assertEqual(0, len(idx))
-
- def test_large(self):
- entry1_sha = hex_to_sha("4e6388232ec39792661e2e75db8fb117fc869ce6")
- entry2_sha = hex_to_sha("e98f071751bd77f59967bfa671cd2caebdccc9a2")
- entries = [
- (entry1_sha, 0xF2972D0830529B87, 24),
- (entry2_sha, (~0xF2972D0830529B87) & (2**64 - 1), 92),
- ]
- if not self._supports_large:
- self.assertRaises(
- TypeError, self.index, "single.idx", entries, pack_checksum
- )
- return
- idx = self.index("single.idx", entries, pack_checksum)
- self.assertEqual(idx.get_pack_checksum(), pack_checksum)
- self.assertEqual(2, len(idx))
- actual_entries = list(idx.iterentries())
- self.assertEqual(len(entries), len(actual_entries))
- for mine, actual in zip(entries, actual_entries):
- my_sha, my_offset, my_crc = mine
- actual_sha, actual_offset, actual_crc = actual
- self.assertEqual(my_sha, actual_sha)
- self.assertEqual(my_offset, actual_offset)
- if self._has_crc32_checksum:
- self.assertEqual(my_crc, actual_crc)
- else:
- self.assertIsNone(actual_crc)
-
- def test_single(self):
- entry_sha = hex_to_sha("6f670c0fb53f9463760b7295fbb814e965fb20c8")
- my_entries = [(entry_sha, 178, 42)]
- idx = self.index("single.idx", my_entries, pack_checksum)
- self.assertEqual(idx.get_pack_checksum(), pack_checksum)
- self.assertEqual(1, len(idx))
- actual_entries = list(idx.iterentries())
- self.assertEqual(len(my_entries), len(actual_entries))
- for mine, actual in zip(my_entries, actual_entries):
- my_sha, my_offset, my_crc = mine
- actual_sha, actual_offset, actual_crc = actual
- self.assertEqual(my_sha, actual_sha)
- self.assertEqual(my_offset, actual_offset)
- if self._has_crc32_checksum:
- self.assertEqual(my_crc, actual_crc)
- else:
- self.assertIsNone(actual_crc)
-
-
-class BaseTestFilePackIndexWriting(BaseTestPackIndexWriting):
- def setUp(self):
- self.tempdir = tempfile.mkdtemp()
-
- def tearDown(self):
- shutil.rmtree(self.tempdir)
-
- def index(self, filename, entries, pack_checksum):
- path = os.path.join(self.tempdir, filename)
- self.writeIndex(path, entries, pack_checksum)
- idx = load_pack_index(path)
- self.assertSucceeds(idx.check)
- self.assertEqual(idx.version, self._expected_version)
- return idx
-
- def writeIndex(self, filename, entries, pack_checksum):
- # FIXME: Write to BytesIO instead rather than hitting disk ?
- with GitFile(filename, "wb") as f:
- self._write_fn(f, entries, pack_checksum)
-
-
-class TestMemoryIndexWriting(TestCase, BaseTestPackIndexWriting):
- def setUp(self):
- TestCase.setUp(self)
- self._has_crc32_checksum = True
- self._supports_large = True
-
- def index(self, filename, entries, pack_checksum):
- return MemoryPackIndex(entries, pack_checksum)
-
- def tearDown(self):
- TestCase.tearDown(self)
-
-
-class TestPackIndexWritingv1(TestCase, BaseTestFilePackIndexWriting):
- def setUp(self):
- TestCase.setUp(self)
- BaseTestFilePackIndexWriting.setUp(self)
- self._has_crc32_checksum = False
- self._expected_version = 1
- self._supports_large = False
- self._write_fn = write_pack_index_v1
-
- def tearDown(self):
- TestCase.tearDown(self)
- BaseTestFilePackIndexWriting.tearDown(self)
-
-
-class TestPackIndexWritingv2(TestCase, BaseTestFilePackIndexWriting):
- def setUp(self):
- TestCase.setUp(self)
- BaseTestFilePackIndexWriting.setUp(self)
- self._has_crc32_checksum = True
- self._supports_large = True
- self._expected_version = 2
- self._write_fn = write_pack_index_v2
-
- def tearDown(self):
- TestCase.tearDown(self)
- BaseTestFilePackIndexWriting.tearDown(self)
-
-
-class ReadZlibTests(TestCase):
- decomp = (
- b"tree 4ada885c9196b6b6fa08744b5862bf92896fc002\n"
- b"parent None\n"
- b"author Jelmer Vernooij <jelmer@samba.org> 1228980214 +0000\n"
- b"committer Jelmer Vernooij <jelmer@samba.org> 1228980214 +0000\n"
- b"\n"
- b"Provide replacement for mmap()'s offset argument."
- )
- comp = zlib.compress(decomp)
- extra = b"nextobject"
-
- def setUp(self):
- super().setUp()
- self.read = BytesIO(self.comp + self.extra).read
- self.unpacked = UnpackedObject(
- Tree.type_num, decomp_len=len(self.decomp), crc32=0
- )
-
- def test_decompress_size(self):
- good_decomp_len = len(self.decomp)
- self.unpacked.decomp_len = -1
- self.assertRaises(ValueError, read_zlib_chunks, self.read, self.unpacked)
- self.unpacked.decomp_len = good_decomp_len - 1
- self.assertRaises(zlib.error, read_zlib_chunks, self.read, self.unpacked)
- self.unpacked.decomp_len = good_decomp_len + 1
- self.assertRaises(zlib.error, read_zlib_chunks, self.read, self.unpacked)
-
- def test_decompress_truncated(self):
- read = BytesIO(self.comp[:10]).read
- self.assertRaises(zlib.error, read_zlib_chunks, read, self.unpacked)
-
- read = BytesIO(self.comp).read
- self.assertRaises(zlib.error, read_zlib_chunks, read, self.unpacked)
-
- def test_decompress_empty(self):
- unpacked = UnpackedObject(Tree.type_num, decomp_len=0)
- comp = zlib.compress(b"")
- read = BytesIO(comp + self.extra).read
- unused = read_zlib_chunks(read, unpacked)
- self.assertEqual(b"", b"".join(unpacked.decomp_chunks))
- self.assertNotEqual(b"", unused)
- self.assertEqual(self.extra, unused + read())
-
- def test_decompress_no_crc32(self):
- self.unpacked.crc32 = None
- read_zlib_chunks(self.read, self.unpacked)
- self.assertEqual(None, self.unpacked.crc32)
-
- def _do_decompress_test(self, buffer_size, **kwargs):
- unused = read_zlib_chunks(
- self.read, self.unpacked, buffer_size=buffer_size, **kwargs
- )
- self.assertEqual(self.decomp, b"".join(self.unpacked.decomp_chunks))
- self.assertEqual(zlib.crc32(self.comp), self.unpacked.crc32)
- self.assertNotEqual(b"", unused)
- self.assertEqual(self.extra, unused + self.read())
-
- def test_simple_decompress(self):
- self._do_decompress_test(4096)
- self.assertEqual(None, self.unpacked.comp_chunks)
-
- # These buffer sizes are not intended to be realistic, but rather simulate
- # larger buffer sizes that may end at various places.
- def test_decompress_buffer_size_1(self):
- self._do_decompress_test(1)
-
- def test_decompress_buffer_size_2(self):
- self._do_decompress_test(2)
-
- def test_decompress_buffer_size_3(self):
- self._do_decompress_test(3)
-
- def test_decompress_buffer_size_4(self):
- self._do_decompress_test(4)
-
- def test_decompress_include_comp(self):
- self._do_decompress_test(4096, include_comp=True)
- self.assertEqual(self.comp, b"".join(self.unpacked.comp_chunks))
-
-
-class DeltifyTests(TestCase):
- def test_empty(self):
- self.assertEqual([], list(deltify_pack_objects([])))
-
- def test_single(self):
- b = Blob.from_string(b"foo")
- self.assertEqual(
- [
- UnpackedObject(
- b.type_num,
- sha=b.sha().digest(),
- delta_base=None,
- decomp_chunks=b.as_raw_chunks(),
- )
- ],
- list(deltify_pack_objects([(b, b"")])),
- )
-
- def test_simple_delta(self):
- b1 = Blob.from_string(b"a" * 101)
- b2 = Blob.from_string(b"a" * 100)
- delta = list(create_delta(b1.as_raw_chunks(), b2.as_raw_chunks()))
- self.assertEqual(
- [
- UnpackedObject(
- b1.type_num,
- sha=b1.sha().digest(),
- delta_base=None,
- decomp_chunks=b1.as_raw_chunks(),
- ),
- UnpackedObject(
- b2.type_num,
- sha=b2.sha().digest(),
- delta_base=b1.sha().digest(),
- decomp_chunks=delta,
- ),
- ],
- list(deltify_pack_objects([(b1, b""), (b2, b"")])),
- )
-
-
-class TestPackStreamReader(TestCase):
- def test_read_objects_emtpy(self):
- f = BytesIO()
- build_pack(f, [])
- reader = PackStreamReader(f.read)
- self.assertEqual(0, len(list(reader.read_objects())))
-
- def test_read_objects(self):
- f = BytesIO()
- entries = build_pack(
- f,
- [
- (Blob.type_num, b"blob"),
- (OFS_DELTA, (0, b"blob1")),
- ],
- )
- reader = PackStreamReader(f.read)
- objects = list(reader.read_objects(compute_crc32=True))
- self.assertEqual(2, len(objects))
-
- unpacked_blob, unpacked_delta = objects
-
- self.assertEqual(entries[0][0], unpacked_blob.offset)
- self.assertEqual(Blob.type_num, unpacked_blob.pack_type_num)
- self.assertEqual(Blob.type_num, unpacked_blob.obj_type_num)
- self.assertEqual(None, unpacked_blob.delta_base)
- self.assertEqual(b"blob", b"".join(unpacked_blob.decomp_chunks))
- self.assertEqual(entries[0][4], unpacked_blob.crc32)
-
- self.assertEqual(entries[1][0], unpacked_delta.offset)
- self.assertEqual(OFS_DELTA, unpacked_delta.pack_type_num)
- self.assertEqual(None, unpacked_delta.obj_type_num)
- self.assertEqual(
- unpacked_delta.offset - unpacked_blob.offset,
- unpacked_delta.delta_base,
- )
- delta = create_delta(b"blob", b"blob1")
- self.assertEqual(b"".join(delta), b"".join(unpacked_delta.decomp_chunks))
- self.assertEqual(entries[1][4], unpacked_delta.crc32)
-
- def test_read_objects_buffered(self):
- f = BytesIO()
- build_pack(
- f,
- [
- (Blob.type_num, b"blob"),
- (OFS_DELTA, (0, b"blob1")),
- ],
- )
- reader = PackStreamReader(f.read, zlib_bufsize=4)
- self.assertEqual(2, len(list(reader.read_objects())))
-
- def test_read_objects_empty(self):
- reader = PackStreamReader(BytesIO().read)
- self.assertRaises(AssertionError, list, reader.read_objects())
-
-
-class TestPackIterator(DeltaChainIterator):
- _compute_crc32 = True
-
- def __init__(self, *args, **kwargs) -> None:
- super().__init__(*args, **kwargs)
- self._unpacked_offsets: Set[int] = set()
-
- def _result(self, unpacked):
- """Return entries in the same format as build_pack."""
- return (
- unpacked.offset,
- unpacked.obj_type_num,
- b"".join(unpacked.obj_chunks),
- unpacked.sha(),
- unpacked.crc32,
- )
-
- def _resolve_object(self, offset, pack_type_num, base_chunks):
- assert offset not in self._unpacked_offsets, (
- "Attempted to re-inflate offset %i" % offset
- )
- self._unpacked_offsets.add(offset)
- return super()._resolve_object(offset, pack_type_num, base_chunks)
-
-
-class DeltaChainIteratorTests(TestCase):
- def setUp(self):
- super().setUp()
- self.store = MemoryObjectStore()
- self.fetched = set()
-
- def store_blobs(self, blobs_data):
- blobs = []
- for data in blobs_data:
- blob = make_object(Blob, data=data)
- blobs.append(blob)
- self.store.add_object(blob)
- return blobs
-
- def get_raw_no_repeat(self, bin_sha):
- """Wrapper around store.get_raw that doesn't allow repeat lookups."""
- hex_sha = sha_to_hex(bin_sha)
- self.assertNotIn(
- hex_sha, self.fetched, "Attempted to re-fetch object %s" % hex_sha
- )
- self.fetched.add(hex_sha)
- return self.store.get_raw(hex_sha)
-
- def make_pack_iter(self, f, thin=None):
- if thin is None:
- thin = bool(list(self.store))
- resolve_ext_ref = thin and self.get_raw_no_repeat or None
- data = PackData("test.pack", file=f)
- return TestPackIterator.for_pack_data(data, resolve_ext_ref=resolve_ext_ref)
-
- def make_pack_iter_subset(self, f, subset, thin=None):
- if thin is None:
- thin = bool(list(self.store))
- resolve_ext_ref = thin and self.get_raw_no_repeat or None
- data = PackData("test.pack", file=f)
- assert data
- index = MemoryPackIndex.for_pack(data)
- pack = Pack.from_objects(data, index)
- return TestPackIterator.for_pack_subset(
- pack, subset, resolve_ext_ref=resolve_ext_ref
- )
-
- def assertEntriesMatch(self, expected_indexes, entries, pack_iter):
- expected = [entries[i] for i in expected_indexes]
- self.assertEqual(expected, list(pack_iter._walk_all_chains()))
-
- def test_no_deltas(self):
- f = BytesIO()
- entries = build_pack(
- f,
- [
- (Commit.type_num, b"commit"),
- (Blob.type_num, b"blob"),
- (Tree.type_num, b"tree"),
- ],
- )
- self.assertEntriesMatch([0, 1, 2], entries, self.make_pack_iter(f))
- f.seek(0)
- self.assertEntriesMatch([], entries, self.make_pack_iter_subset(f, []))
- f.seek(0)
- self.assertEntriesMatch(
- [1, 0],
- entries,
- self.make_pack_iter_subset(f, [entries[0][3], entries[1][3]]),
- )
- f.seek(0)
- self.assertEntriesMatch(
- [1, 0],
- entries,
- self.make_pack_iter_subset(
- f, [sha_to_hex(entries[0][3]), sha_to_hex(entries[1][3])]
- ),
- )
-
- def test_ofs_deltas(self):
- f = BytesIO()
- entries = build_pack(
- f,
- [
- (Blob.type_num, b"blob"),
- (OFS_DELTA, (0, b"blob1")),
- (OFS_DELTA, (0, b"blob2")),
- ],
- )
- # Delta resolution changed to DFS
- self.assertEntriesMatch([0, 2, 1], entries, self.make_pack_iter(f))
- f.seek(0)
- self.assertEntriesMatch(
- [0, 2, 1],
- entries,
- self.make_pack_iter_subset(f, [entries[1][3], entries[2][3]]),
- )
-
- def test_ofs_deltas_chain(self):
- f = BytesIO()
- entries = build_pack(
- f,
- [
- (Blob.type_num, b"blob"),
- (OFS_DELTA, (0, b"blob1")),
- (OFS_DELTA, (1, b"blob2")),
- ],
- )
- self.assertEntriesMatch([0, 1, 2], entries, self.make_pack_iter(f))
-
- def test_ref_deltas(self):
- f = BytesIO()
- entries = build_pack(
- f,
- [
- (REF_DELTA, (1, b"blob1")),
- (Blob.type_num, (b"blob")),
- (REF_DELTA, (1, b"blob2")),
- ],
- )
- # Delta resolution changed to DFS
- self.assertEntriesMatch([1, 2, 0], entries, self.make_pack_iter(f))
-
- def test_ref_deltas_chain(self):
- f = BytesIO()
- entries = build_pack(
- f,
- [
- (REF_DELTA, (2, b"blob1")),
- (Blob.type_num, (b"blob")),
- (REF_DELTA, (1, b"blob2")),
- ],
- )
- self.assertEntriesMatch([1, 2, 0], entries, self.make_pack_iter(f))
-
- def test_ofs_and_ref_deltas(self):
- # Deltas pending on this offset are popped before deltas depending on
- # this ref.
- f = BytesIO()
- entries = build_pack(
- f,
- [
- (REF_DELTA, (1, b"blob1")),
- (Blob.type_num, (b"blob")),
- (OFS_DELTA, (1, b"blob2")),
- ],
- )
-
- # Delta resolution changed to DFS
- self.assertEntriesMatch([1, 0, 2], entries, self.make_pack_iter(f))
-
- def test_mixed_chain(self):
- f = BytesIO()
- entries = build_pack(
- f,
- [
- (Blob.type_num, b"blob"),
- (REF_DELTA, (2, b"blob2")),
- (OFS_DELTA, (0, b"blob1")),
- (OFS_DELTA, (1, b"blob3")),
- (OFS_DELTA, (0, b"bob")),
- ],
- )
- # Delta resolution changed to DFS
- self.assertEntriesMatch([0, 4, 2, 1, 3], entries, self.make_pack_iter(f))
-
- def test_long_chain(self):
- n = 100
- objects_spec = [(Blob.type_num, b"blob")]
- for i in range(n):
- objects_spec.append((OFS_DELTA, (i, b"blob" + str(i).encode("ascii"))))
- f = BytesIO()
- entries = build_pack(f, objects_spec)
- self.assertEntriesMatch(range(n + 1), entries, self.make_pack_iter(f))
-
- def test_branchy_chain(self):
- n = 100
- objects_spec = [(Blob.type_num, b"blob")]
- for i in range(n):
- objects_spec.append((OFS_DELTA, (0, b"blob" + str(i).encode("ascii"))))
- f = BytesIO()
- entries = build_pack(f, objects_spec)
- # Delta resolution changed to DFS
- indices = [0, *list(range(100, 0, -1))]
- self.assertEntriesMatch(indices, entries, self.make_pack_iter(f))
-
- def test_ext_ref(self):
- (blob,) = self.store_blobs([b"blob"])
- f = BytesIO()
- entries = build_pack(f, [(REF_DELTA, (blob.id, b"blob1"))], store=self.store)
- pack_iter = self.make_pack_iter(f)
- self.assertEntriesMatch([0], entries, pack_iter)
- self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())
-
- def test_ext_ref_chain(self):
- (blob,) = self.store_blobs([b"blob"])
- f = BytesIO()
- entries = build_pack(
- f,
- [
- (REF_DELTA, (1, b"blob2")),
- (REF_DELTA, (blob.id, b"blob1")),
- ],
- store=self.store,
- )
- pack_iter = self.make_pack_iter(f)
- self.assertEntriesMatch([1, 0], entries, pack_iter)
- self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())
-
- def test_ext_ref_chain_degenerate(self):
- # Test a degenerate case where the sender is sending a REF_DELTA
- # object that expands to an object already in the repository.
- (blob,) = self.store_blobs([b"blob"])
- (blob2,) = self.store_blobs([b"blob2"])
- assert blob.id < blob2.id
-
- f = BytesIO()
- entries = build_pack(
- f,
- [
- (REF_DELTA, (blob.id, b"blob2")),
- (REF_DELTA, (0, b"blob3")),
- ],
- store=self.store,
- )
- pack_iter = self.make_pack_iter(f)
- self.assertEntriesMatch([0, 1], entries, pack_iter)
- self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())
-
- def test_ext_ref_multiple_times(self):
- (blob,) = self.store_blobs([b"blob"])
- f = BytesIO()
- entries = build_pack(
- f,
- [
- (REF_DELTA, (blob.id, b"blob1")),
- (REF_DELTA, (blob.id, b"blob2")),
- ],
- store=self.store,
- )
- pack_iter = self.make_pack_iter(f)
- self.assertEntriesMatch([0, 1], entries, pack_iter)
- self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())
-
- def test_multiple_ext_refs(self):
- b1, b2 = self.store_blobs([b"foo", b"bar"])
- f = BytesIO()
- entries = build_pack(
- f,
- [
- (REF_DELTA, (b1.id, b"foo1")),
- (REF_DELTA, (b2.id, b"bar2")),
- ],
- store=self.store,
- )
- pack_iter = self.make_pack_iter(f)
- self.assertEntriesMatch([0, 1], entries, pack_iter)
- self.assertEqual([hex_to_sha(b1.id), hex_to_sha(b2.id)], pack_iter.ext_refs())
-
- def test_bad_ext_ref_non_thin_pack(self):
- (blob,) = self.store_blobs([b"blob"])
- f = BytesIO()
- build_pack(f, [(REF_DELTA, (blob.id, b"blob1"))], store=self.store)
- pack_iter = self.make_pack_iter(f, thin=False)
- try:
- list(pack_iter._walk_all_chains())
- self.fail()
- except UnresolvedDeltas as e:
- self.assertEqual([blob.id], e.shas)
-
- def test_bad_ext_ref_thin_pack(self):
- b1, b2, b3 = self.store_blobs([b"foo", b"bar", b"baz"])
- f = BytesIO()
- build_pack(
- f,
- [
- (REF_DELTA, (1, b"foo99")),
- (REF_DELTA, (b1.id, b"foo1")),
- (REF_DELTA, (b2.id, b"bar2")),
- (REF_DELTA, (b3.id, b"baz3")),
- ],
- store=self.store,
- )
- del self.store[b2.id]
- del self.store[b3.id]
- pack_iter = self.make_pack_iter(f)
- try:
- list(pack_iter._walk_all_chains())
- self.fail()
- except UnresolvedDeltas as e:
- self.assertEqual((sorted([b2.id, b3.id]),), (sorted(e.shas),))
-
-
-class DeltaEncodeSizeTests(TestCase):
- def test_basic(self):
- self.assertEqual(b"\x00", _delta_encode_size(0))
- self.assertEqual(b"\x01", _delta_encode_size(1))
- self.assertEqual(b"\xfa\x01", _delta_encode_size(250))
- self.assertEqual(b"\xe8\x07", _delta_encode_size(1000))
- self.assertEqual(b"\xa0\x8d\x06", _delta_encode_size(100000))
-
-
-class EncodeCopyOperationTests(TestCase):
- def test_basic(self):
- self.assertEqual(b"\x80", _encode_copy_operation(0, 0))
- self.assertEqual(b"\x91\x01\x0a", _encode_copy_operation(1, 10))
- self.assertEqual(b"\xb1\x64\xe8\x03", _encode_copy_operation(100, 1000))
- self.assertEqual(b"\x93\xe8\x03\x01", _encode_copy_operation(1000, 1))
blob - 3653f7fbbf0854d334f9627486e01f20fd2b6af0 (mode 644)
blob + /dev/null
--- dulwich/tests/test_patch.py
+++ /dev/null
-# test_patch.py -- tests for patch.py
-# Copyright (C) 2010 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for patch.py."""
-
-from io import BytesIO, StringIO
-
-from dulwich.tests import SkipTest, TestCase
-
-from ..object_store import MemoryObjectStore
-from ..objects import S_IFGITLINK, Blob, Commit, Tree
-from ..patch import (
- get_summary,
- git_am_patch_split,
- write_blob_diff,
- write_commit_patch,
- write_object_diff,
- write_tree_diff,
-)
-
-
-class WriteCommitPatchTests(TestCase):
- def test_simple_bytesio(self):
- f = BytesIO()
- c = Commit()
- c.committer = c.author = b"Jelmer <jelmer@samba.org>"
- c.commit_time = c.author_time = 1271350201
- c.commit_timezone = c.author_timezone = 0
- c.message = b"This is the first line\nAnd this is the second line.\n"
- c.tree = Tree().id
- write_commit_patch(f, c, b"CONTENTS", (1, 1), version="custom")
- f.seek(0)
- lines = f.readlines()
- self.assertTrue(
- lines[0].startswith(b"From 0b0d34d1b5b596c928adc9a727a4b9e03d025298")
- )
- self.assertEqual(lines[1], b"From: Jelmer <jelmer@samba.org>\n")
- self.assertTrue(lines[2].startswith(b"Date: "))
- self.assertEqual(
- [
- b"Subject: [PATCH 1/1] This is the first line\n",
- b"And this is the second line.\n",
- b"\n",
- b"\n",
- b"---\n",
- ],
- lines[3:8],
- )
- self.assertEqual([b"CONTENTS-- \n", b"custom\n"], lines[-2:])
- if len(lines) >= 12:
- # diffstat may not be present
- self.assertEqual(lines[8], b" 0 files changed\n")
-
-
-class ReadGitAmPatch(TestCase):
- def test_extract_string(self):
- text = b"""\
-From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001
-From: Jelmer Vernooij <jelmer@samba.org>
-Date: Thu, 15 Apr 2010 15:40:28 +0200
-Subject: [PATCH 1/2] Remove executable bit from prey.ico (triggers a warning).
-
----
- pixmaps/prey.ico | Bin 9662 -> 9662 bytes
- 1 files changed, 0 insertions(+), 0 deletions(-)
- mode change 100755 => 100644 pixmaps/prey.ico
-
---
-1.7.0.4
-"""
- c, diff, version = git_am_patch_split(StringIO(text.decode("utf-8")), "utf-8")
- self.assertEqual(b"Jelmer Vernooij <jelmer@samba.org>", c.committer)
- self.assertEqual(b"Jelmer Vernooij <jelmer@samba.org>", c.author)
- self.assertEqual(
- b"Remove executable bit from prey.ico " b"(triggers a warning).\n",
- c.message,
- )
- self.assertEqual(
- b""" pixmaps/prey.ico | Bin 9662 -> 9662 bytes
- 1 files changed, 0 insertions(+), 0 deletions(-)
- mode change 100755 => 100644 pixmaps/prey.ico
-
-""",
- diff,
- )
- self.assertEqual(b"1.7.0.4", version)
-
- def test_extract_bytes(self):
- text = b"""\
-From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001
-From: Jelmer Vernooij <jelmer@samba.org>
-Date: Thu, 15 Apr 2010 15:40:28 +0200
-Subject: [PATCH 1/2] Remove executable bit from prey.ico (triggers a warning).
-
----
- pixmaps/prey.ico | Bin 9662 -> 9662 bytes
- 1 files changed, 0 insertions(+), 0 deletions(-)
- mode change 100755 => 100644 pixmaps/prey.ico
-
---
-1.7.0.4
-"""
- c, diff, version = git_am_patch_split(BytesIO(text))
- self.assertEqual(b"Jelmer Vernooij <jelmer@samba.org>", c.committer)
- self.assertEqual(b"Jelmer Vernooij <jelmer@samba.org>", c.author)
- self.assertEqual(
- b"Remove executable bit from prey.ico " b"(triggers a warning).\n",
- c.message,
- )
- self.assertEqual(
- b""" pixmaps/prey.ico | Bin 9662 -> 9662 bytes
- 1 files changed, 0 insertions(+), 0 deletions(-)
- mode change 100755 => 100644 pixmaps/prey.ico
-
-""",
- diff,
- )
- self.assertEqual(b"1.7.0.4", version)
-
- def test_extract_spaces(self):
- text = b"""From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001
-From: Jelmer Vernooij <jelmer@samba.org>
-Date: Thu, 15 Apr 2010 15:40:28 +0200
-Subject: [Dulwich-users] [PATCH] Added unit tests for
- dulwich.object_store.tree_lookup_path.
-
-* dulwich/tests/test_object_store.py
- (TreeLookupPathTests): This test case contains a few tests that ensure the
- tree_lookup_path function works as expected.
----
- pixmaps/prey.ico | Bin 9662 -> 9662 bytes
- 1 files changed, 0 insertions(+), 0 deletions(-)
- mode change 100755 => 100644 pixmaps/prey.ico
-
---
-1.7.0.4
-"""
- c, diff, version = git_am_patch_split(BytesIO(text), "utf-8")
- self.assertEqual(
- b"""\
-Added unit tests for dulwich.object_store.tree_lookup_path.
-
-* dulwich/tests/test_object_store.py
- (TreeLookupPathTests): This test case contains a few tests that ensure the
- tree_lookup_path function works as expected.
-""",
- c.message,
- )
-
- def test_extract_pseudo_from_header(self):
- text = b"""From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001
-From: Jelmer Vernooij <jelmer@samba.org>
-Date: Thu, 15 Apr 2010 15:40:28 +0200
-Subject: [Dulwich-users] [PATCH] Added unit tests for
- dulwich.object_store.tree_lookup_path.
-
-From: Jelmer Vernooij <jelmer@debian.org>
-
-* dulwich/tests/test_object_store.py
- (TreeLookupPathTests): This test case contains a few tests that ensure the
- tree_lookup_path function works as expected.
----
- pixmaps/prey.ico | Bin 9662 -> 9662 bytes
- 1 files changed, 0 insertions(+), 0 deletions(-)
- mode change 100755 => 100644 pixmaps/prey.ico
-
---
-1.7.0.4
-"""
- c, diff, version = git_am_patch_split(BytesIO(text), "utf-8")
- self.assertEqual(b"Jelmer Vernooij <jelmer@debian.org>", c.author)
- self.assertEqual(
- b"""\
-Added unit tests for dulwich.object_store.tree_lookup_path.
-
-* dulwich/tests/test_object_store.py
- (TreeLookupPathTests): This test case contains a few tests that ensure the
- tree_lookup_path function works as expected.
-""",
- c.message,
- )
-
- def test_extract_no_version_tail(self):
- text = b"""\
-From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001
-From: Jelmer Vernooij <jelmer@samba.org>
-Date: Thu, 15 Apr 2010 15:40:28 +0200
-Subject: [Dulwich-users] [PATCH] Added unit tests for
- dulwich.object_store.tree_lookup_path.
-
-From: Jelmer Vernooij <jelmer@debian.org>
-
----
- pixmaps/prey.ico | Bin 9662 -> 9662 bytes
- 1 files changed, 0 insertions(+), 0 deletions(-)
- mode change 100755 => 100644 pixmaps/prey.ico
-
-"""
- c, diff, version = git_am_patch_split(BytesIO(text), "utf-8")
- self.assertEqual(None, version)
-
- def test_extract_mercurial(self):
- raise SkipTest(
- "git_am_patch_split doesn't handle Mercurial patches " "properly yet"
- )
- expected_diff = """\
-diff --git a/dulwich/tests/test_patch.py b/dulwich/tests/test_patch.py
---- a/dulwich/tests/test_patch.py
-+++ b/dulwich/tests/test_patch.py
-@@ -158,7 +158,7 @@
-
- '''
- c, diff, version = git_am_patch_split(BytesIO(text))
-- self.assertIs(None, version)
-+ self.assertEqual(None, version)
-
-
- class DiffTests(TestCase):
-"""
- text = (
- """\
-From dulwich-users-bounces+jelmer=samba.org@lists.launchpad.net \
-Mon Nov 29 00:58:18 2010
-Date: Sun, 28 Nov 2010 17:57:27 -0600
-From: Augie Fackler <durin42@gmail.com>
-To: dulwich-users <dulwich-users@lists.launchpad.net>
-Subject: [Dulwich-users] [PATCH] test_patch: fix tests on Python 2.6
-Content-Transfer-Encoding: 8bit
-
-Change-Id: I5e51313d4ae3a65c3f00c665002a7489121bb0d6
-
-%s
-
-_______________________________________________
-Mailing list: https://launchpad.net/~dulwich-users
-Post to : dulwich-users@lists.launchpad.net
-Unsubscribe : https://launchpad.net/~dulwich-users
-More help : https://help.launchpad.net/ListHelp
-
-"""
- % expected_diff
- )
- c, diff, version = git_am_patch_split(BytesIO(text))
- self.assertEqual(expected_diff, diff)
- self.assertEqual(None, version)
-
-
-class DiffTests(TestCase):
- """Tests for write_blob_diff and write_tree_diff."""
-
- def test_blob_diff(self):
- f = BytesIO()
- write_blob_diff(
- f,
- (b"foo.txt", 0o644, Blob.from_string(b"old\nsame\n")),
- (b"bar.txt", 0o644, Blob.from_string(b"new\nsame\n")),
- )
- self.assertEqual(
- [
- b"diff --git a/foo.txt b/bar.txt",
- b"index 3b0f961..a116b51 644",
- b"--- a/foo.txt",
- b"+++ b/bar.txt",
- b"@@ -1,2 +1,2 @@",
- b"-old",
- b"+new",
- b" same",
- ],
- f.getvalue().splitlines(),
- )
-
- def test_blob_add(self):
- f = BytesIO()
- write_blob_diff(
- f,
- (None, None, None),
- (b"bar.txt", 0o644, Blob.from_string(b"new\nsame\n")),
- )
- self.assertEqual(
- [
- b"diff --git a/bar.txt b/bar.txt",
- b"new file mode 644",
- b"index 0000000..a116b51",
- b"--- /dev/null",
- b"+++ b/bar.txt",
- b"@@ -0,0 +1,2 @@",
- b"+new",
- b"+same",
- ],
- f.getvalue().splitlines(),
- )
-
- def test_blob_remove(self):
- f = BytesIO()
- write_blob_diff(
- f,
- (b"bar.txt", 0o644, Blob.from_string(b"new\nsame\n")),
- (None, None, None),
- )
- self.assertEqual(
- [
- b"diff --git a/bar.txt b/bar.txt",
- b"deleted file mode 644",
- b"index a116b51..0000000",
- b"--- a/bar.txt",
- b"+++ /dev/null",
- b"@@ -1,2 +0,0 @@",
- b"-new",
- b"-same",
- ],
- f.getvalue().splitlines(),
- )
-
- def test_tree_diff(self):
- f = BytesIO()
- store = MemoryObjectStore()
- added = Blob.from_string(b"add\n")
- removed = Blob.from_string(b"removed\n")
- changed1 = Blob.from_string(b"unchanged\nremoved\n")
- changed2 = Blob.from_string(b"unchanged\nadded\n")
- unchanged = Blob.from_string(b"unchanged\n")
- tree1 = Tree()
- tree1.add(b"removed.txt", 0o644, removed.id)
- tree1.add(b"changed.txt", 0o644, changed1.id)
- tree1.add(b"unchanged.txt", 0o644, changed1.id)
- tree2 = Tree()
- tree2.add(b"added.txt", 0o644, added.id)
- tree2.add(b"changed.txt", 0o644, changed2.id)
- tree2.add(b"unchanged.txt", 0o644, changed1.id)
- store.add_objects(
- [
- (o, None)
- for o in [
- tree1,
- tree2,
- added,
- removed,
- changed1,
- changed2,
- unchanged,
- ]
- ]
- )
- write_tree_diff(f, store, tree1.id, tree2.id)
- self.assertEqual(
- [
- b"diff --git a/added.txt b/added.txt",
- b"new file mode 644",
- b"index 0000000..76d4bb8",
- b"--- /dev/null",
- b"+++ b/added.txt",
- b"@@ -0,0 +1 @@",
- b"+add",
- b"diff --git a/changed.txt b/changed.txt",
- b"index bf84e48..1be2436 644",
- b"--- a/changed.txt",
- b"+++ b/changed.txt",
- b"@@ -1,2 +1,2 @@",
- b" unchanged",
- b"-removed",
- b"+added",
- b"diff --git a/removed.txt b/removed.txt",
- b"deleted file mode 644",
- b"index 2c3f0b3..0000000",
- b"--- a/removed.txt",
- b"+++ /dev/null",
- b"@@ -1 +0,0 @@",
- b"-removed",
- ],
- f.getvalue().splitlines(),
- )
-
- def test_tree_diff_submodule(self):
- f = BytesIO()
- store = MemoryObjectStore()
- tree1 = Tree()
- tree1.add(
- b"asubmodule",
- S_IFGITLINK,
- b"06d0bdd9e2e20377b3180e4986b14c8549b393e4",
- )
- tree2 = Tree()
- tree2.add(
- b"asubmodule",
- S_IFGITLINK,
- b"cc975646af69f279396d4d5e1379ac6af80ee637",
- )
- store.add_objects([(o, None) for o in [tree1, tree2]])
- write_tree_diff(f, store, tree1.id, tree2.id)
- self.assertEqual(
- [
- b"diff --git a/asubmodule b/asubmodule",
- b"index 06d0bdd..cc97564 160000",
- b"--- a/asubmodule",
- b"+++ b/asubmodule",
- b"@@ -1 +1 @@",
- b"-Subproject commit 06d0bdd9e2e20377b3180e4986b14c8549b393e4",
- b"+Subproject commit cc975646af69f279396d4d5e1379ac6af80ee637",
- ],
- f.getvalue().splitlines(),
- )
-
- def test_object_diff_blob(self):
- f = BytesIO()
- b1 = Blob.from_string(b"old\nsame\n")
- b2 = Blob.from_string(b"new\nsame\n")
- store = MemoryObjectStore()
- store.add_objects([(b1, None), (b2, None)])
- write_object_diff(
- f, store, (b"foo.txt", 0o644, b1.id), (b"bar.txt", 0o644, b2.id)
- )
- self.assertEqual(
- [
- b"diff --git a/foo.txt b/bar.txt",
- b"index 3b0f961..a116b51 644",
- b"--- a/foo.txt",
- b"+++ b/bar.txt",
- b"@@ -1,2 +1,2 @@",
- b"-old",
- b"+new",
- b" same",
- ],
- f.getvalue().splitlines(),
- )
-
- def test_object_diff_add_blob(self):
- f = BytesIO()
- store = MemoryObjectStore()
- b2 = Blob.from_string(b"new\nsame\n")
- store.add_object(b2)
- write_object_diff(f, store, (None, None, None), (b"bar.txt", 0o644, b2.id))
- self.assertEqual(
- [
- b"diff --git a/bar.txt b/bar.txt",
- b"new file mode 644",
- b"index 0000000..a116b51",
- b"--- /dev/null",
- b"+++ b/bar.txt",
- b"@@ -0,0 +1,2 @@",
- b"+new",
- b"+same",
- ],
- f.getvalue().splitlines(),
- )
-
- def test_object_diff_remove_blob(self):
- f = BytesIO()
- b1 = Blob.from_string(b"new\nsame\n")
- store = MemoryObjectStore()
- store.add_object(b1)
- write_object_diff(f, store, (b"bar.txt", 0o644, b1.id), (None, None, None))
- self.assertEqual(
- [
- b"diff --git a/bar.txt b/bar.txt",
- b"deleted file mode 644",
- b"index a116b51..0000000",
- b"--- a/bar.txt",
- b"+++ /dev/null",
- b"@@ -1,2 +0,0 @@",
- b"-new",
- b"-same",
- ],
- f.getvalue().splitlines(),
- )
-
- def test_object_diff_bin_blob_force(self):
- f = BytesIO()
- # Prepare two slightly different PNG headers
- b1 = Blob.from_string(
- b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a"
- b"\x00\x00\x00\x0d\x49\x48\x44\x52"
- b"\x00\x00\x01\xd5\x00\x00\x00\x9f"
- b"\x08\x04\x00\x00\x00\x05\x04\x8b"
- )
- b2 = Blob.from_string(
- b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a"
- b"\x00\x00\x00\x0d\x49\x48\x44\x52"
- b"\x00\x00\x01\xd5\x00\x00\x00\x9f"
- b"\x08\x03\x00\x00\x00\x98\xd3\xb3"
- )
- store = MemoryObjectStore()
- store.add_objects([(b1, None), (b2, None)])
- write_object_diff(
- f,
- store,
- (b"foo.png", 0o644, b1.id),
- (b"bar.png", 0o644, b2.id),
- diff_binary=True,
- )
- self.assertEqual(
- [
- b"diff --git a/foo.png b/bar.png",
- b"index f73e47d..06364b7 644",
- b"--- a/foo.png",
- b"+++ b/bar.png",
- b"@@ -1,4 +1,4 @@",
- b" \x89PNG",
- b" \x1a",
- b" \x00\x00\x00",
- b"-IHDR\x00\x00\x01\xd5\x00\x00\x00"
- b"\x9f\x08\x04\x00\x00\x00\x05\x04\x8b",
- b"\\ No newline at end of file",
- b"+IHDR\x00\x00\x01\xd5\x00\x00\x00\x9f"
- b"\x08\x03\x00\x00\x00\x98\xd3\xb3",
- b"\\ No newline at end of file",
- ],
- f.getvalue().splitlines(),
- )
-
- def test_object_diff_bin_blob(self):
- f = BytesIO()
- # Prepare two slightly different PNG headers
- b1 = Blob.from_string(
- b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a"
- b"\x00\x00\x00\x0d\x49\x48\x44\x52"
- b"\x00\x00\x01\xd5\x00\x00\x00\x9f"
- b"\x08\x04\x00\x00\x00\x05\x04\x8b"
- )
- b2 = Blob.from_string(
- b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a"
- b"\x00\x00\x00\x0d\x49\x48\x44\x52"
- b"\x00\x00\x01\xd5\x00\x00\x00\x9f"
- b"\x08\x03\x00\x00\x00\x98\xd3\xb3"
- )
- store = MemoryObjectStore()
- store.add_objects([(b1, None), (b2, None)])
- write_object_diff(
- f, store, (b"foo.png", 0o644, b1.id), (b"bar.png", 0o644, b2.id)
- )
- self.assertEqual(
- [
- b"diff --git a/foo.png b/bar.png",
- b"index f73e47d..06364b7 644",
- b"Binary files a/foo.png and b/bar.png differ",
- ],
- f.getvalue().splitlines(),
- )
-
- def test_object_diff_add_bin_blob(self):
- f = BytesIO()
- b2 = Blob.from_string(
- b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a"
- b"\x00\x00\x00\x0d\x49\x48\x44\x52"
- b"\x00\x00\x01\xd5\x00\x00\x00\x9f"
- b"\x08\x03\x00\x00\x00\x98\xd3\xb3"
- )
- store = MemoryObjectStore()
- store.add_object(b2)
- write_object_diff(f, store, (None, None, None), (b"bar.png", 0o644, b2.id))
- self.assertEqual(
- [
- b"diff --git a/bar.png b/bar.png",
- b"new file mode 644",
- b"index 0000000..06364b7",
- b"Binary files /dev/null and b/bar.png differ",
- ],
- f.getvalue().splitlines(),
- )
-
- def test_object_diff_remove_bin_blob(self):
- f = BytesIO()
- b1 = Blob.from_string(
- b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a"
- b"\x00\x00\x00\x0d\x49\x48\x44\x52"
- b"\x00\x00\x01\xd5\x00\x00\x00\x9f"
- b"\x08\x04\x00\x00\x00\x05\x04\x8b"
- )
- store = MemoryObjectStore()
- store.add_object(b1)
- write_object_diff(f, store, (b"foo.png", 0o644, b1.id), (None, None, None))
- self.assertEqual(
- [
- b"diff --git a/foo.png b/foo.png",
- b"deleted file mode 644",
- b"index f73e47d..0000000",
- b"Binary files a/foo.png and /dev/null differ",
- ],
- f.getvalue().splitlines(),
- )
-
- def test_object_diff_kind_change(self):
- f = BytesIO()
- b1 = Blob.from_string(b"new\nsame\n")
- store = MemoryObjectStore()
- store.add_object(b1)
- write_object_diff(
- f,
- store,
- (b"bar.txt", 0o644, b1.id),
- (
- b"bar.txt",
- 0o160000,
- b"06d0bdd9e2e20377b3180e4986b14c8549b393e4",
- ),
- )
- self.assertEqual(
- [
- b"diff --git a/bar.txt b/bar.txt",
- b"old file mode 644",
- b"new file mode 160000",
- b"index a116b51..06d0bdd 160000",
- b"--- a/bar.txt",
- b"+++ b/bar.txt",
- b"@@ -1,2 +1 @@",
- b"-new",
- b"-same",
- b"+Subproject commit 06d0bdd9e2e20377b3180e4986b14c8549b393e4",
- ],
- f.getvalue().splitlines(),
- )
-
-
-class GetSummaryTests(TestCase):
- def test_simple(self):
- c = Commit()
- c.committer = c.author = b"Jelmer <jelmer@samba.org>"
- c.commit_time = c.author_time = 1271350201
- c.commit_timezone = c.author_timezone = 0
- c.message = b"This is the first line\nAnd this is the second line.\n"
- c.tree = Tree().id
- self.assertEqual("This-is-the-first-line", get_summary(c))
blob - a0a6c6b0b68b991a1453afa3f3487b27479eb82b (mode 644)
blob + /dev/null
--- dulwich/tests/test_porcelain.py
+++ /dev/null
-# test_porcelain.py -- porcelain tests
-# Copyright (C) 2013 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for dulwich.porcelain."""
-
-import contextlib
-import os
-import platform
-import re
-import shutil
-import stat
-import subprocess
-import sys
-import tarfile
-import tempfile
-import threading
-import time
-from io import BytesIO, StringIO
-from unittest import skipIf
-
-from dulwich import porcelain
-from dulwich.tests import TestCase
-
-from ..diff_tree import tree_changes
-from ..errors import CommitError
-from ..objects import ZERO_SHA, Blob, Tag, Tree
-from ..porcelain import CheckoutError
-from ..repo import NoIndexPresent, Repo
-from ..server import DictBackend
-from ..web import make_server, make_wsgi_chain
-from .utils import build_commit_graph, make_commit, make_object
-
-try:
- import gpg
-except ImportError:
- gpg = None
-
-
-def flat_walk_dir(dir_to_walk):
- for dirpath, _, filenames in os.walk(dir_to_walk):
- rel_dirpath = os.path.relpath(dirpath, dir_to_walk)
- if not dirpath == dir_to_walk:
- yield rel_dirpath
- for filename in filenames:
- if dirpath == dir_to_walk:
- yield filename
- else:
- yield os.path.join(rel_dirpath, filename)
-
-
-class PorcelainTestCase(TestCase):
- def setUp(self):
- super().setUp()
- self.test_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.test_dir)
- self.repo_path = os.path.join(self.test_dir, "repo")
- self.repo = Repo.init(self.repo_path, mkdir=True)
- self.addCleanup(self.repo.close)
-
- def assertRecentTimestamp(self, ts):
- # On some slow CIs it does actually take more than 5 seconds to go from
- # creating the tag to here.
- self.assertLess(time.time() - ts, 50)
-
-
-@skipIf(gpg is None, "gpg is not available")
-class PorcelainGpgTestCase(PorcelainTestCase):
- DEFAULT_KEY = """
------BEGIN PGP PRIVATE KEY BLOCK-----
-
-lQVYBGBjIyIBDADAwydvMPQqeEiK54FG1DHwT5sQejAaJOb+PsOhVa4fLcKsrO3F
-g5CxO+/9BHCXAr8xQAtp/gOhDN05fyK3MFyGlL9s+Cd8xf34S3R4rN/qbF0oZmaa
-FW0MuGnniq54HINs8KshadVn1Dhi/GYSJ588qNFRl/qxFTYAk+zaGsgX/QgFfy0f
-djWXJLypZXu9D6DlyJ0cPSzUlfBkI2Ytx6grzIquRjY0FbkjK3l+iGsQ+ebRMdcP
-Sqd5iTN9XuzIUVoBFAZBRjibKV3N2wxlnCbfLlzCyDp7rktzSThzjJ2pVDuLrMAx
-6/L9hIhwmFwdtY4FBFGvMR0b0Ugh3kCsRWr8sgj9I7dUoLHid6ObYhJFhnD3GzRc
-U+xX1uy3iTCqJDsG334aQIhC5Giuxln4SUZna2MNbq65ksh38N1aM/t3+Dc/TKVB
-rb5KWicRPCQ4DIQkHMDCSPyj+dvRLCPzIaPvHD7IrCfHYHOWuvvPGCpwjo0As3iP
-IecoMeguPLVaqgcAEQEAAQAL/i5/pQaUd4G7LDydpbixPS6r9UrfPrU/y5zvBP/p
-DCynPDutJ1oq539pZvXQ2VwEJJy7x0UVKkjyMndJLNWly9wHC7o8jkHx/NalVP47
-LXR+GWbCdOOcYYbdAWcCNB3zOtzPnWhdAEagkc2G9xRQDIB0dLHLCIUpCbLP/CWM
-qlHnDsVMrVTWjgzcpsnyGgw8NeLYJtYGB8dsN+XgCCjo7a9LEvUBKNgdmWBbf14/
-iBw7PCugazFcH9QYfZwzhsi3nqRRagTXHbxFRG0LD9Ro9qCEutHYGP2PJ59Nj8+M
-zaVkJj/OxWxVOGvn2q16mQBCjKpbWfqXZVVl+G5DGOmiSTZqXy+3j6JCKdOMy6Qd
-JBHOHhFZXYmWYaaPzoc33T/C3QhMfY5sOtUDLJmV05Wi4dyBeNBEslYgUuTk/jXb
-5ZAie25eDdrsoqkcnSs2ZguMF7AXhe6il2zVhUUMs/6UZgd6I7I4Is0HXT/pnxEp
-uiTRFu4v8E+u+5a8O3pffe5boQYA3TsIxceen20qY+kRaTOkURHMZLn/y6KLW8bZ
-rNJyXWS9hBAcbbSGhfOwYfzbDCM17yPQO3E2zo8lcGdRklUdIIaCxQwtu36N5dfx
-OLCCQc5LmYdl/EAm91iAhrr7dNntZ18MU09gdzUu+ONZwu4CP3cJT83+qYZULso8
-4Fvd/X8IEfGZ7kM+ylrdqBwtlrn8yYXtom+ows2M2UuNR53B+BUOd73kVLTkTCjE
-JH63+nE8BqG7tDLCMws+23SAA3xxBgDfDrr0x7zCozQKVQEqBzQr9Uoo/c/ZjAfi
-syzNSrDz+g5gqJYtuL9XpPJVWf6V1GXVyJlSbxR9CjTkBxmlPxpvV25IsbVSsh0o
-aqkf2eWpbCL6Qb2E0jd1rvf8sGeTTohzYfiSVVsC2t9ngRO/CmetizwQBvRzLGMZ
-4mtAPiy7ZEDc2dFrPp7zlKISYmJZUx/DJVuZWuOrVMpBP+bSgJXoMTlICxZUqUnE
-2VKVStb/L+Tl8XCwIWdrZb9BaDnHqfcGAM2B4HNPxP88Yj1tEDly/vqeb3vVMhj+
-S1lunnLdgxp46YyuTMYAzj88eCGurRtzBsdxxlGAsioEnZGebEqAHQbieKq/DO6I
-MOMZHMSVBDqyyIx3assGlxSX8BSFW0lhKyT7i0XqnAgCJ9f/5oq0SbFGq+01VQb7
-jIx9PbcYJORxsE0JG/CXXPv27bRtQXsudkWGSYvC0NLOgk4z8+kQpQtyFh16lujq
-WRwMeriu0qNDjCa1/eHIKDovhAZ3GyO5/9m1tBlUZXN0IFVzZXIgPHRlc3RAdGVz
-dC5jb20+iQHOBBMBCAA4AhsDBQsJCAcCBhUKCQgLAgQWAgMBAh4BAheAFiEEjrR8
-MQ4fJK44PYMvfN2AClLmXiYFAmDcEZEACgkQfN2AClLmXibZzgv/ZfeTpTuqQE1W
-C1jT5KpQExnt0BizTX0U7BvSn8Fr6VXTyol6kYc3u71GLUuJyawCLtIzOXqOXJvz
-bjcZqymcMADuftKcfMy513FhbF6MhdVd6QoeBP6+7/xXOFJCi+QVYF7SQ2h7K1Qm
-+yXOiAMgSxhCZQGPBNJLlDUOd47nSIMANvlumFtmLY/1FD7RpG7WQWjeX1mnxNTw
-hUU+Yv7GuFc/JprXCIYqHbhWfvXyVtae2ZK4xuVi5eqwA2RfggOVM7drb+CgPhG0
-+9aEDDLOZqVi65wK7J73Puo3rFTbPQMljxw5s27rWqF+vB6hhVdJOPNomWy3naPi
-k5MW0mhsacASz1WYndpZz+XaQTq/wJF5HUyyeUWJ0vlOEdwx021PHcqSTyfNnkjD
-KncrE21t2sxWRsgGDETxIwkd2b2HNGAvveUD0ffFK/oJHGSXjAERFGc3wuiDj3mQ
-BvKm4wt4QF9ZMrCdhMAA6ax5kfEUqQR4ntmrJk/khp/mV7TILaI4nQVYBGBjIyIB
-DADghIo9wXnRxzfdDTvwnP8dHpLAIaPokgdpyLswqUCixJWiW2xcV6weUjEWwH6n
-eN/t1uZYVehbrotxVPla+MPvzhxp6/cmG+2lhzEBOp6zRwnL1wIB6HoKJfpREhyM
-c8rLR0zMso1L1bJTyydvnu07a7BWo3VWKjilb0rEZZUSD/2hidx5HxMOJSoidLWe
-d/PPuv6yht3NtA4UThlcfldm9G6PbqCdm1kMEKAkq0wVJvhPJ6gEFRNJimgygfUw
-MDFXEIhQtxjgdV5Uoz3O5452VLoRsDlgpi3E0WDGj7WXDaO5uSU0T5aJgVgHCP/f
-xZhHuQFk2YYIl5nCBpOZyWWI0IKmscTuEwzpkhICQDQFvcMZ5ibsl7wA2P7YTrQf
-FDMjjzuaK80GYPfxDFlyKUyLqFt8w/QzsZLDLX7+jxIEpbRAaMw/JsWqm5BMxxbS
-3CIQiS5S3oSKDsNINelqWFfwvLhvlQra8gIxyNTlek25OdgG66BiiX+seH8A/ql+
-F+MAEQEAAQAL/1jrNSLjMt9pwo6qFKClVQZP2vf7+sH7v7LeHIDXr3EnYUnVYnOq
-B1FU5PspTp/+J9W25DB9CZLx7Gj8qeslFdiuLSOoIBB4RCToB3kAoeTH0DHqW/Gs
-hFTrmJkuDp9zpo/ek6SIXJx5rHAyR9KVw0fizQprH2f6PcgLbTWeM61dJuqowmg3
-7eCOyIKv7VQvFqEhYokLD+JNmrvg+Htg0DXGvdjRjAwPf/NezEXpj67a6cHTp1/C
-hwp7pevG+3fTxaCJFesl5/TxxtnaBLE8m2uo/S6Hxgn9l0edonroe1QlTjEqGLy2
-7qi2z5Rem+v6GWNDRgvAWur13v8FNdyduHlioG/NgRsU9mE2MYeFsfi3cfNpJQp/
-wC9PSCIXrb/45mkS8KyjZpCrIPB9RV/m0MREq01TPom7rstZc4A1pD0Ot7AtUYS3
-e95zLyEmeLziPJ9fV4fgPmEudDr1uItnmV0LOskKlpg5sc0hhdrwYoobfkKt2dx6
-DqfMlcM1ZkUbLQYA4jwfpFJG4HmYvjL2xCJxM0ycjvMbqFN+4UjgYWVlRfOrm1V4
-Op86FjbRbV6OOCNhznotAg7mul4xtzrrTkK8o3YLBeJseDgl4AWuzXtNa9hE0XpK
-9gJoEHUuBOOsamVh2HpXESFyE5CclOV7JSh541TlZKfnqfZYCg4JSbp0UijkawCL
-5bJJUiGGMD9rZUxIAKQO1DvUEzptS7Jl6S3y5sbIIhilp4KfYWbSk3PPu9CnZD5b
-LhEQp0elxnb/IL8PBgD+DpTeC8unkGKXUpbe9x0ISI6V1D6FmJq/FxNg7fMa3QCh
-fGiAyoTm80ZETynj+blRaDO3gY4lTLa3Opubof1EqK2QmwXmpyvXEZNYcQfQ2CCS
-GOWUCK8jEQamUPf1PWndZXJUmROI1WukhlL71V/ir6zQeVCv1wcwPwclJPnAe87u
-pEklnCYpvsEldwHUX9u0BWzoULIEsi+ddtHmT0KTeF/DHRy0W15jIHbjFqhqckj1
-/6fmr7l7kIi/kN4vWe0F/0Q8IXX+cVMgbl3aIuaGcvENLGcoAsAtPGx88SfRgmfu
-HK64Y7hx1m+Bo215rxJzZRjqHTBPp0BmCi+JKkaavIBrYRbsx20gveI4dzhLcUhB
-kiT4Q7oz0/VbGHS1CEf9KFeS/YOGj57s4yHauSVI0XdP9kBRTWmXvBkzsooB2cKH
-hwhUN7iiT1k717CiTNUT6Q/pcPFCyNuMoBBGQTU206JEgIjQvI3f8xMUMGmGVVQz
-9/k716ycnhb2JZ/Q/AyQIeHJiQG2BBgBCAAgAhsMFiEEjrR8MQ4fJK44PYMvfN2A
-ClLmXiYFAmDcEa4ACgkQfN2AClLmXiZxxQv/XaMN0hPCygtrQMbCsTNb34JbvJzh
-hngPuUAfTbRHrR3YeATyQofNbL0DD3fvfzeFF8qESqvzCSZxS6dYsXPd4MCJTzlp
-zYBZ2X0sOrgDqZvqCZKN72RKgdk0KvthdzAxsIm2dfcQOxxowXMxhJEXZmsFpusx
-jKJxOcrfVRjXJnh9isY0NpCoqMQ+3k3wDJ3VGEHV7G+A+vFkWfbLJF5huQ96uaH9
-Uc+jUsREUH9G82ZBqpoioEN8Ith4VXpYnKdTMonK/+ZcyeraJZhXrvbjnEomKdzU
-0pu4bt1HlLR3dcnpjN7b009MBf2xLgEfQk2nPZ4zzY+tDkxygtPllaB4dldFjBpT
-j7Q+t49sWMjmlJUbLlHfuJ7nUUK5+cGjBsWVObAEcyfemHWCTVFnEa2BJslGC08X
-rFcjRRcMEr9ct4551QFBHsv3O/Wp3/wqczYgE9itSnGT05w+4vLt4smG+dnEHjRJ
-brMb2upTHa+kjktjdO96/BgSnKYqmNmPB/qB
-=ivA/
------END PGP PRIVATE KEY BLOCK-----
- """
-
- DEFAULT_KEY_ID = "8EB47C310E1F24AE383D832F7CDD800A52E65E26"
-
- NON_DEFAULT_KEY = """
------BEGIN PGP PRIVATE KEY BLOCK-----
-
-lQVYBGBjI0ABDADGWBRp+t02emfzUlhrc1psqIhhecFm6Em0Kv33cfDpnfoMF1tK
-Yy/4eLYIR7FmpdbFPcDThFNHbXJzBi00L1mp0XQE2l50h/2bDAAgREdZ+NVo5a7/
-RSZjauNU1PxW6pnXMehEh1tyIQmV78jAukaakwaicrpIenMiFUN3fAKHnLuFffA6
-t0f3LqJvTDhUw/o2vPgw5e6UDQhA1C+KTv1KXVrhJNo88a3hZqCZ76z3drKR411Q
-zYgT4DUb8lfnbN+z2wfqT9oM5cegh2k86/mxAA3BYOeQrhmQo/7uhezcgbxtdGZr
-YlbuaNDTSBrn10ZoaxLPo2dJe2zWxgD6MpvsGU1w3tcRW508qo/+xoWp2/pDzmok
-+uhOh1NAj9zB05VWBz1r7oBgCOIKpkD/LD4VKq59etsZ/UnrYDwKdXWZp7uhshkU
-M7N35lUJcR76a852dlMdrgpmY18+BP7+o7M+5ElHTiqQbMuE1nHTg8RgVpdV+tUx
-dg6GWY/XHf5asm8AEQEAAQAL/A85epOp+GnymmEQfI3+5D178D//Lwu9n86vECB6
-xAHCqQtdjZnXpDp/1YUsL59P8nzgYRk7SoMskQDoQ/cB/XFuDOhEdMSgHaTVlnrj
-ktCCq6rqGnUosyolbb64vIfVaSqd/5SnCStpAsnaBoBYrAu4ZmV4xfjDQWwn0q5s
-u+r56mD0SkjPgbwk/b3qTVagVmf2OFzUgWwm1e/X+bA1oPag1NV8VS4hZPXswT4f
-qhiyqUFOgP6vUBcqehkjkIDIl/54xII7/P5tp3LIZawvIXqHKNTqYPCqaCqCj+SL
-vMYDIb6acjescfZoM71eAeHAANeFZzr/rwfBT+dEP6qKmPXNcvgE11X44ZCr04nT
-zOV/uDUifEvKT5qgtyJpSFEVr7EXubJPKoNNhoYqq9z1pYU7IedX5BloiVXKOKTY
-0pk7JkLqf3g5fYtXh/wol1owemITJy5V5PgaqZvk491LkI6S+kWC7ANYUg+TDPIW
-afxW3E5N1CYV6XDAl0ZihbLcoQYAy0Ky/p/wayWKePyuPBLwx9O89GSONK2pQljZ
-yaAgxPQ5/i1vx6LIMg7k/722bXR9W3zOjWOin4eatPM3d2hkG96HFvnBqXSmXOPV
-03Xqy1/B5Tj8E9naLKUHE/OBQEc363DgLLG9db5HfPlpAngeppYPdyWkhzXyzkgS
-PylaE5eW3zkdjEbYJ6RBTecTZEgBaMvJNPdWbn//frpP7kGvyiCg5Es+WjLInUZ6
-0sdifcNTCewzLXK80v/y5mVOdJhPBgD5zs9cYdyiQJayqAuOr+He1eMHMVUbm9as
-qBmPrst398eBW9ZYF7eBfTSlUf6B+WnvyLKEGsUf/7IK0EWDlzoBuWzWiHjUAY1g
-m9eTV2MnvCCCefqCErWwfFo2nWOasAZA9sKD+ICIBY4tbtvSl4yfLBzTMwSvs9ZS
-K1ocPSYUnhm2miSWZ8RLZPH7roHQasNHpyq/AX7DahFf2S/bJ+46ZGZ8Pigr7hA+
-MjmpQ4qVdb5SaViPmZhAKO+PjuCHm+EF/2H0Y3Sl4eXgxZWoQVOUeXdWg9eMfYrj
-XDtUMIFppV/QxbeztZKvJdfk64vt/crvLsOp0hOky9cKwY89r4QaHfexU3qR+qDq
-UlMvR1rHk7dS5HZAtw0xKsFJNkuDxvBkMqv8Los8zp3nUl+U99dfZOArzNkW38wx
-FPa0ixkC9za2BkDrWEA8vTnxw0A2upIFegDUhwOByrSyfPPnG3tKGeqt3Izb/kDk
-Q9vmo+HgxBOguMIvlzbBfQZwtbd/gXzlvPqCtCJBbm90aGVyIFRlc3QgVXNlciA8
-dGVzdDJAdGVzdC5jb20+iQHOBBMBCAA4AhsDBQsJCAcCBhUKCQgLAgQWAgMBAh4B
-AheAFiEEapM5P1DF5qzT1vtFuTYhLttOFMAFAmDcEeEACgkQuTYhLttOFMDe0Qv/
-Qx/bzXztJ3BCc+CYAVDx7Kr37S68etwwLgcWzhG+CDeMB5F/QE+upKgxy2iaqQFR
-mxfOMgf/TIQkUfkbaASzK1LpnesYO85pk7XYjoN1bYEHiXTkeW+bgB6aJIxrRmO2
-SrWasdBC/DsI3Mrya8YMt/TiHC6VpRJVxCe5vv7/kZC4CXrgTBnZocXx/YXimbke
-poPMVdbvhYh6N0aGeS38jRKgyN10KXmhDTAQDwseVFavBWAjVfx3DEwjtK2Z2GbA
-aL8JvAwRtqiPFkDMIKPL4UwxtXFws8SpMt6juroUkNyf6+BxNWYqmwXHPy8zCJAb
-xkxIJMlEc+s7qQsP3fILOo8Xn+dVzJ5sa5AoARoXm1GMjsdqaKAzq99Dic/dHnaQ
-Civev1PQsdwlYW2C2wNXNeIrxMndbDMFfNuZ6BnGHWJ/wjcp/pFs4YkyyZN8JH7L
-hP2FO4Jgham3AuP13kC3Ivea7V6hR8QNcDZRwFPOMIX4tXwQv1T72+7DZGaA25O7
-nQVXBGBjI0ABDADJMBYIcG0Yil9YxFs7aYzNbd7alUAr89VbY8eIGPHP3INFPM1w
-lBQCu+4j6xdEbhMpppLBZ9A5TEylP4C6qLtPa+oLtPeuSw8gHDE10XE4lbgPs376
-rL60XdImSOHhiduACUefYjqpcmFH9Bim1CC+koArYrSQJQx1Jri+OpnTaL/8UID0
-KzD/kEgMVGlHIVj9oJmb4+j9pW8I/g0wDSnIaEKFMxqu6SIVJ1GWj+MUMvZigjLC
-sNCZd7PnbOC5VeU3SsXj6he74Jx0AmGMPWIHi9M0DjHO5d1cCbXTnud8xxM1bOh4
-7aCTnMK5cVyIr+adihgJpVVhrndSM8aklBPRgtozrGNCgF2CkYU2P1blxfloNr/8
-UZpM83o+s1aObBszzRNLxnpNORqoLqjfPtLEPQnagxE+4EapCq0NZ/x6yO5VTwwp
-NljdFAEk40uGuKyn1QA3uNMHy5DlpLl+tU7t1KEovdZ+OVYsYKZhVzw0MTpKogk9
-JI7AN0q62ronPskAEQEAAQAL+O8BUSt1ZCVjPSIXIsrR+ZOSkszZwgJ1CWIoh0IH
-YD2vmcMHGIhFYgBdgerpvhptKhaw7GcXDScEnYkyh5s4GE2hxclik1tbj/x1gYCN
-8BNoyeDdPFxQG73qN12D99QYEctpOsz9xPLIDwmL0j1ehAfhwqHIAPm9Ca+i8JYM
-x/F+35S/jnKDXRI+NVlwbiEyXKXxxIqNlpy9i8sDBGexO5H5Sg0zSN/B1duLekGD
-biDw6gLc6bCgnS+0JOUpU07Z2fccMOY9ncjKGD2uIb/ePPUaek92GCQyq0eorCIV
-brcQsRc5sSsNtnRKQTQtxioROeDg7kf2oWySeHTswlXW/219ihrSXgteHJd+rPm7
-DYLEeGLRny8bRKv8rQdAtApHaJE4dAATXeY4RYo4NlXHYaztGYtU6kiM/3zCfWAe
-9Nn+Wh9jMTZrjefUCagS5r6ZqAh7veNo/vgIGaCLh0a1Ypa0Yk9KFrn3LYEM3zgk
-3m3bn+7qgy5cUYXoJ3DGJJEhBgDPonpW0WElqLs5ZMem1ha85SC38F0IkAaSuzuz
-v3eORiKWuyJGF32Q2XHa1RHQs1JtUKd8rxFer3b8Oq71zLz6JtVc9dmRudvgcJYX
-0PC11F6WGjZFSSp39dajFp0A5DKUs39F3w7J1yuDM56TDIN810ywufGAHARY1pZb
-UJAy/dTqjFnCbNjpAakor3hVzqxcmUG+7Y2X9c2AGncT1MqAQC3M8JZcuZvkK8A9
-cMk8B914ryYE7VsZMdMhyTwHmykGAPgNLLa3RDETeGeGCKWI+ZPOoU0ib5JtJZ1d
-P3tNwfZKuZBZXKW9gqYqyBa/qhMip84SP30pr/TvulcdAFC759HK8sQZyJ6Vw24P
-c+5ssRxrQUEw1rvJPWhmQCmCOZHBMQl5T6eaTOpR5u3aUKTMlxPKhK9eC1dCSTnI
-/nyL8An3VKnLy+K/LI42YGphBVLLJmBewuTVDIJviWRdntiG8dElyEJMOywUltk3
-2CEmqgsD9tPO8rXZjnMrMn3gfsiaoQYA6/6/e2utkHr7gAoWBgrBBdqVHsvqh5Ro
-2DjLAOpZItO/EdCJfDAmbTYOa04535sBDP2tcH/vipPOPpbr1Y9Y/mNsKCulNxed
-yqAmEkKOcerLUP5UHju0AB6VBjHJFdU2mqT+UjPyBk7WeKXgFomyoYMv3KpNOFWR
-xi0Xji4kKHbttA6Hy3UcGPr9acyUAlDYeKmxbSUYIPhw32bbGrX9+F5YriTufRsG
-3jftQVo9zqdcQSD/5pUTMn3EYbEcohYB2YWJAbYEGAEIACACGwwWIQRqkzk/UMXm
-rNPW+0W5NiEu204UwAUCYNwR6wAKCRC5NiEu204UwOPnC/92PgB1c3h9FBXH1maz
-g29fndHIHH65VLgqMiQ7HAMojwRlT5Xnj5tdkCBmszRkv5vMvdJRa3ZY8Ed/Inqr
-hxBFNzpjqX4oj/RYIQLKXWWfkTKYVLJFZFPCSo00jesw2gieu3Ke/Yy4gwhtNodA
-v+s6QNMvffTW/K3XNrWDB0E7/LXbdidzhm+MBu8ov2tuC3tp9liLICiE1jv/2xT4
-CNSO6yphmk1/1zEYHS/mN9qJ2csBmte2cdmGyOcuVEHk3pyINNMDOamaURBJGRwF
-XB5V7gTKUFU4jCp3chywKrBHJHxGGDUmPBmZtDtfWAOgL32drK7/KUyzZL/WO7Fj
-akOI0hRDFOcqTYWL20H7+hAiX3oHMP7eou3L5C7wJ9+JMcACklN/WMjG9a536DFJ
-4UgZ6HyKPP+wy837Hbe8b25kNMBwFgiaLR0lcgzxj7NyQWjVCMOEN+M55tRCjvL6
-ya6JVZCRbMXfdCy8lVPgtNQ6VlHaj8Wvnn2FLbWWO2n2r3s=
-=9zU5
------END PGP PRIVATE KEY BLOCK-----
-"""
-
- NON_DEFAULT_KEY_ID = "6A93393F50C5E6ACD3D6FB45B936212EDB4E14C0"
-
- def setUp(self):
- super().setUp()
- self.gpg_dir = os.path.join(self.test_dir, "gpg")
- os.mkdir(self.gpg_dir, mode=0o700)
- # Ignore errors when deleting GNUPGHOME, because of race conditions
- # (e.g. the gpg-agent socket having been deleted). See
- # https://github.com/jelmer/dulwich/issues/1000
- self.addCleanup(shutil.rmtree, self.gpg_dir, ignore_errors=True)
- self.overrideEnv("GNUPGHOME", self.gpg_dir)
-
- def import_default_key(self):
- subprocess.run(
- ["gpg", "--import"],
- stdout=subprocess.DEVNULL,
- stderr=subprocess.DEVNULL,
- input=PorcelainGpgTestCase.DEFAULT_KEY,
- text=True,
- )
-
- def import_non_default_key(self):
- subprocess.run(
- ["gpg", "--import"],
- stdout=subprocess.DEVNULL,
- stderr=subprocess.DEVNULL,
- input=PorcelainGpgTestCase.NON_DEFAULT_KEY,
- text=True,
- )
-
-
-class ArchiveTests(PorcelainTestCase):
- """Tests for the archive command."""
-
- def test_simple(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"refs/heads/master"] = c3.id
- out = BytesIO()
- err = BytesIO()
- porcelain.archive(
- self.repo.path, b"refs/heads/master", outstream=out, errstream=err
- )
- self.assertEqual(b"", err.getvalue())
- tf = tarfile.TarFile(fileobj=out)
- self.addCleanup(tf.close)
- self.assertEqual([], tf.getnames())
-
-
-class UpdateServerInfoTests(PorcelainTestCase):
- def test_simple(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"refs/heads/foo"] = c3.id
- porcelain.update_server_info(self.repo.path)
- self.assertTrue(
- os.path.exists(os.path.join(self.repo.controldir(), "info", "refs"))
- )
-
-
-class CommitTests(PorcelainTestCase):
- def test_custom_author(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"refs/heads/foo"] = c3.id
- sha = porcelain.commit(
- self.repo.path,
- message=b"Some message",
- author=b"Joe <joe@example.com>",
- committer=b"Bob <bob@example.com>",
- )
- self.assertIsInstance(sha, bytes)
- self.assertEqual(len(sha), 40)
-
- def test_unicode(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"refs/heads/foo"] = c3.id
- sha = porcelain.commit(
- self.repo.path,
- message="Some message",
- author="Joe <joe@example.com>",
- committer="Bob <bob@example.com>",
- )
- self.assertIsInstance(sha, bytes)
- self.assertEqual(len(sha), 40)
-
- def test_no_verify(self):
- if os.name != "posix":
- self.skipTest("shell hook tests requires POSIX shell")
- self.assertTrue(os.path.exists("/bin/sh"))
-
- hooks_dir = os.path.join(self.repo.controldir(), "hooks")
- os.makedirs(hooks_dir, exist_ok=True)
- self.addCleanup(shutil.rmtree, hooks_dir)
-
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
-
- hook_fail = "#!/bin/sh\nexit 1"
-
- # hooks are executed in pre-commit, commit-msg order
- # test commit-msg failure first, then pre-commit failure, then
- # no_verify to skip both hooks
- commit_msg = os.path.join(hooks_dir, "commit-msg")
- with open(commit_msg, "w") as f:
- f.write(hook_fail)
- os.chmod(commit_msg, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
-
- with self.assertRaises(CommitError):
- porcelain.commit(
- self.repo.path,
- message="Some message",
- author="Joe <joe@example.com>",
- committer="Bob <bob@example.com>",
- )
-
- pre_commit = os.path.join(hooks_dir, "pre-commit")
- with open(pre_commit, "w") as f:
- f.write(hook_fail)
- os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
-
- with self.assertRaises(CommitError):
- porcelain.commit(
- self.repo.path,
- message="Some message",
- author="Joe <joe@example.com>",
- committer="Bob <bob@example.com>",
- )
-
- sha = porcelain.commit(
- self.repo.path,
- message="Some message",
- author="Joe <joe@example.com>",
- committer="Bob <bob@example.com>",
- no_verify=True,
- )
- self.assertIsInstance(sha, bytes)
- self.assertEqual(len(sha), 40)
-
- def test_timezone(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"refs/heads/foo"] = c3.id
- sha = porcelain.commit(
- self.repo.path,
- message="Some message",
- author="Joe <joe@example.com>",
- author_timezone=18000,
- committer="Bob <bob@example.com>",
- commit_timezone=18000,
- )
- self.assertIsInstance(sha, bytes)
- self.assertEqual(len(sha), 40)
-
- commit = self.repo.get_object(sha)
- self.assertEqual(commit._author_timezone, 18000)
- self.assertEqual(commit._commit_timezone, 18000)
-
- self.overrideEnv("GIT_AUTHOR_DATE", "1995-11-20T19:12:08-0501")
- self.overrideEnv("GIT_COMMITTER_DATE", "1995-11-20T19:12:08-0501")
-
- sha = porcelain.commit(
- self.repo.path,
- message="Some message",
- author="Joe <joe@example.com>",
- committer="Bob <bob@example.com>",
- )
- self.assertIsInstance(sha, bytes)
- self.assertEqual(len(sha), 40)
-
- commit = self.repo.get_object(sha)
- self.assertEqual(commit._author_timezone, -18060)
- self.assertEqual(commit._commit_timezone, -18060)
-
- self.overrideEnv("GIT_AUTHOR_DATE", None)
- self.overrideEnv("GIT_COMMITTER_DATE", None)
-
- local_timezone = time.localtime().tm_gmtoff
-
- sha = porcelain.commit(
- self.repo.path,
- message="Some message",
- author="Joe <joe@example.com>",
- committer="Bob <bob@example.com>",
- )
- self.assertIsInstance(sha, bytes)
- self.assertEqual(len(sha), 40)
-
- commit = self.repo.get_object(sha)
- self.assertEqual(commit._author_timezone, local_timezone)
- self.assertEqual(commit._commit_timezone, local_timezone)
-
-
-@skipIf(
- platform.python_implementation() == "PyPy" or sys.platform == "win32",
- "gpgme not easily available or supported on Windows and PyPy",
-)
-class CommitSignTests(PorcelainGpgTestCase):
- def test_default_key(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
- cfg = self.repo.get_config()
- cfg.set(("user",), "signingKey", PorcelainGpgTestCase.DEFAULT_KEY_ID)
- self.import_default_key()
-
- sha = porcelain.commit(
- self.repo.path,
- message="Some message",
- author="Joe <joe@example.com>",
- committer="Bob <bob@example.com>",
- signoff=True,
- )
- self.assertIsInstance(sha, bytes)
- self.assertEqual(len(sha), 40)
-
- commit = self.repo.get_object(sha)
- # GPG Signatures aren't deterministic, so we can't do a static assertion.
- commit.verify()
- commit.verify(keyids=[PorcelainGpgTestCase.DEFAULT_KEY_ID])
-
- self.import_non_default_key()
- self.assertRaises(
- gpg.errors.MissingSignatures,
- commit.verify,
- keyids=[PorcelainGpgTestCase.NON_DEFAULT_KEY_ID],
- )
-
- commit.committer = b"Alice <alice@example.com>"
- self.assertRaises(
- gpg.errors.BadSignatures,
- commit.verify,
- )
-
- def test_non_default_key(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
- cfg = self.repo.get_config()
- cfg.set(("user",), "signingKey", PorcelainGpgTestCase.DEFAULT_KEY_ID)
- self.import_non_default_key()
-
- sha = porcelain.commit(
- self.repo.path,
- message="Some message",
- author="Joe <joe@example.com>",
- committer="Bob <bob@example.com>",
- signoff=PorcelainGpgTestCase.NON_DEFAULT_KEY_ID,
- )
- self.assertIsInstance(sha, bytes)
- self.assertEqual(len(sha), 40)
-
- commit = self.repo.get_object(sha)
- # GPG Signatures aren't deterministic, so we can't do a static assertion.
- commit.verify()
-
-
-class TimezoneTests(PorcelainTestCase):
- def put_envs(self, value):
- self.overrideEnv("GIT_AUTHOR_DATE", value)
- self.overrideEnv("GIT_COMMITTER_DATE", value)
-
- def fallback(self, value):
- self.put_envs(value)
- self.assertRaises(porcelain.TimezoneFormatError, porcelain.get_user_timezones)
-
- def test_internal_format(self):
- self.put_envs("0 +0500")
- self.assertTupleEqual((18000, 18000), porcelain.get_user_timezones())
-
- def test_rfc_2822(self):
- self.put_envs("Mon, 20 Nov 1995 19:12:08 -0500")
- self.assertTupleEqual((-18000, -18000), porcelain.get_user_timezones())
-
- self.put_envs("Mon, 20 Nov 1995 19:12:08")
- self.assertTupleEqual((0, 0), porcelain.get_user_timezones())
-
- def test_iso8601(self):
- self.put_envs("1995-11-20T19:12:08-0501")
- self.assertTupleEqual((-18060, -18060), porcelain.get_user_timezones())
-
- self.put_envs("1995-11-20T19:12:08+0501")
- self.assertTupleEqual((18060, 18060), porcelain.get_user_timezones())
-
- self.put_envs("1995-11-20T19:12:08-05:01")
- self.assertTupleEqual((-18060, -18060), porcelain.get_user_timezones())
-
- self.put_envs("1995-11-20 19:12:08-05")
- self.assertTupleEqual((-18000, -18000), porcelain.get_user_timezones())
-
- # https://github.com/git/git/blob/96b2d4fa927c5055adc5b1d08f10a5d7352e2989/t/t6300-for-each-ref.sh#L128
- self.put_envs("2006-07-03 17:18:44 +0200")
- self.assertTupleEqual((7200, 7200), porcelain.get_user_timezones())
-
- def test_missing_or_malformed(self):
- # TODO: add more here
- self.fallback("0 + 0500")
- self.fallback("a +0500")
-
- self.fallback("1995-11-20T19:12:08")
- self.fallback("1995-11-20T19:12:08-05:")
-
- self.fallback("1995.11.20")
- self.fallback("11/20/1995")
- self.fallback("20.11.1995")
-
- def test_different_envs(self):
- self.overrideEnv("GIT_AUTHOR_DATE", "0 +0500")
- self.overrideEnv("GIT_COMMITTER_DATE", "0 +0501")
- self.assertTupleEqual((18000, 18060), porcelain.get_user_timezones())
-
- def test_no_envs(self):
- local_timezone = time.localtime().tm_gmtoff
-
- self.put_envs("0 +0500")
- self.assertTupleEqual((18000, 18000), porcelain.get_user_timezones())
-
- self.overrideEnv("GIT_COMMITTER_DATE", None)
- self.assertTupleEqual((18000, local_timezone), porcelain.get_user_timezones())
-
- self.put_envs("0 +0500")
- self.overrideEnv("GIT_AUTHOR_DATE", None)
- self.assertTupleEqual((local_timezone, 18000), porcelain.get_user_timezones())
-
- self.put_envs("0 +0500")
- self.overrideEnv("GIT_AUTHOR_DATE", None)
- self.overrideEnv("GIT_COMMITTER_DATE", None)
- self.assertTupleEqual(
- (local_timezone, local_timezone), porcelain.get_user_timezones()
- )
-
-
-class CleanTests(PorcelainTestCase):
- def put_files(self, tracked, ignored, untracked, empty_dirs):
- """Put the described files in the wd."""
- all_files = tracked | ignored | untracked
- for file_path in all_files:
- abs_path = os.path.join(self.repo.path, file_path)
- # File may need to be written in a dir that doesn't exist yet, so
- # create the parent dir(s) as necessary
- parent_dir = os.path.dirname(abs_path)
- try:
- os.makedirs(parent_dir)
- except FileExistsError:
- pass
- with open(abs_path, "w") as f:
- f.write("")
-
- with open(os.path.join(self.repo.path, ".gitignore"), "w") as f:
- f.writelines(ignored)
-
- for dir_path in empty_dirs:
- os.mkdir(os.path.join(self.repo.path, "empty_dir"))
-
- files_to_add = [os.path.join(self.repo.path, t) for t in tracked]
- porcelain.add(repo=self.repo.path, paths=files_to_add)
- porcelain.commit(repo=self.repo.path, message="init commit")
-
- def assert_wd(self, expected_paths):
- """Assert paths of files and dirs in wd are same as expected_paths."""
- control_dir_rel = os.path.relpath(self.repo._controldir, self.repo.path)
-
- # normalize paths to simplify comparison across platforms
- found_paths = {
- os.path.normpath(p)
- for p in flat_walk_dir(self.repo.path)
- if not p.split(os.sep)[0] == control_dir_rel
- }
- norm_expected_paths = {os.path.normpath(p) for p in expected_paths}
- self.assertEqual(found_paths, norm_expected_paths)
-
- def test_from_root(self):
- self.put_files(
- tracked={"tracked_file", "tracked_dir/tracked_file", ".gitignore"},
- ignored={"ignored_file"},
- untracked={
- "untracked_file",
- "tracked_dir/untracked_dir/untracked_file",
- "untracked_dir/untracked_dir/untracked_file",
- },
- empty_dirs={"empty_dir"},
- )
-
- porcelain.clean(repo=self.repo.path, target_dir=self.repo.path)
-
- self.assert_wd(
- {
- "tracked_file",
- "tracked_dir/tracked_file",
- ".gitignore",
- "ignored_file",
- "tracked_dir",
- }
- )
-
- def test_from_subdir(self):
- self.put_files(
- tracked={"tracked_file", "tracked_dir/tracked_file", ".gitignore"},
- ignored={"ignored_file"},
- untracked={
- "untracked_file",
- "tracked_dir/untracked_dir/untracked_file",
- "untracked_dir/untracked_dir/untracked_file",
- },
- empty_dirs={"empty_dir"},
- )
-
- porcelain.clean(
- repo=self.repo,
- target_dir=os.path.join(self.repo.path, "untracked_dir"),
- )
-
- self.assert_wd(
- {
- "tracked_file",
- "tracked_dir/tracked_file",
- ".gitignore",
- "ignored_file",
- "untracked_file",
- "tracked_dir/untracked_dir/untracked_file",
- "empty_dir",
- "untracked_dir",
- "tracked_dir",
- "tracked_dir/untracked_dir",
- }
- )
-
-
-class CloneTests(PorcelainTestCase):
- def test_simple_local(self):
- f1_1 = make_object(Blob, data=b"f1")
- commit_spec = [[1], [2, 1], [3, 1, 2]]
- trees = {
- 1: [(b"f1", f1_1), (b"f2", f1_1)],
- 2: [(b"f1", f1_1), (b"f2", f1_1)],
- 3: [(b"f1", f1_1), (b"f2", f1_1)],
- }
-
- c1, c2, c3 = build_commit_graph(self.repo.object_store, commit_spec, trees)
- self.repo.refs[b"refs/heads/master"] = c3.id
- self.repo.refs[b"refs/tags/foo"] = c3.id
- target_path = tempfile.mkdtemp()
- errstream = BytesIO()
- self.addCleanup(shutil.rmtree, target_path)
- r = porcelain.clone(
- self.repo.path, target_path, checkout=False, errstream=errstream
- )
- self.addCleanup(r.close)
- self.assertEqual(r.path, target_path)
- target_repo = Repo(target_path)
- self.assertEqual(0, len(target_repo.open_index()))
- self.assertEqual(c3.id, target_repo.refs[b"refs/tags/foo"])
- self.assertNotIn(b"f1", os.listdir(target_path))
- self.assertNotIn(b"f2", os.listdir(target_path))
- c = r.get_config()
- encoded_path = self.repo.path
- if not isinstance(encoded_path, bytes):
- encoded_path = encoded_path.encode("utf-8")
- self.assertEqual(encoded_path, c.get((b"remote", b"origin"), b"url"))
- self.assertEqual(
- b"+refs/heads/*:refs/remotes/origin/*",
- c.get((b"remote", b"origin"), b"fetch"),
- )
-
- def test_simple_local_with_checkout(self):
- f1_1 = make_object(Blob, data=b"f1")
- commit_spec = [[1], [2, 1], [3, 1, 2]]
- trees = {
- 1: [(b"f1", f1_1), (b"f2", f1_1)],
- 2: [(b"f1", f1_1), (b"f2", f1_1)],
- 3: [(b"f1", f1_1), (b"f2", f1_1)],
- }
-
- c1, c2, c3 = build_commit_graph(self.repo.object_store, commit_spec, trees)
- self.repo.refs[b"refs/heads/master"] = c3.id
- target_path = tempfile.mkdtemp()
- errstream = BytesIO()
- self.addCleanup(shutil.rmtree, target_path)
- with porcelain.clone(
- self.repo.path, target_path, checkout=True, errstream=errstream
- ) as r:
- self.assertEqual(r.path, target_path)
- with Repo(target_path) as r:
- self.assertEqual(r.head(), c3.id)
- self.assertIn("f1", os.listdir(target_path))
- self.assertIn("f2", os.listdir(target_path))
-
- def test_bare_local_with_checkout(self):
- f1_1 = make_object(Blob, data=b"f1")
- commit_spec = [[1], [2, 1], [3, 1, 2]]
- trees = {
- 1: [(b"f1", f1_1), (b"f2", f1_1)],
- 2: [(b"f1", f1_1), (b"f2", f1_1)],
- 3: [(b"f1", f1_1), (b"f2", f1_1)],
- }
-
- c1, c2, c3 = build_commit_graph(self.repo.object_store, commit_spec, trees)
- self.repo.refs[b"refs/heads/master"] = c3.id
- target_path = tempfile.mkdtemp()
- errstream = BytesIO()
- self.addCleanup(shutil.rmtree, target_path)
- with porcelain.clone(
- self.repo.path, target_path, bare=True, errstream=errstream
- ) as r:
- self.assertEqual(r.path, target_path)
- with Repo(target_path) as r:
- r.head()
- self.assertRaises(NoIndexPresent, r.open_index)
- self.assertNotIn(b"f1", os.listdir(target_path))
- self.assertNotIn(b"f2", os.listdir(target_path))
-
- def test_no_checkout_with_bare(self):
- f1_1 = make_object(Blob, data=b"f1")
- commit_spec = [[1]]
- trees = {1: [(b"f1", f1_1), (b"f2", f1_1)]}
-
- (c1,) = build_commit_graph(self.repo.object_store, commit_spec, trees)
- self.repo.refs[b"refs/heads/master"] = c1.id
- self.repo.refs[b"HEAD"] = c1.id
- target_path = tempfile.mkdtemp()
- errstream = BytesIO()
- self.addCleanup(shutil.rmtree, target_path)
- self.assertRaises(
- porcelain.Error,
- porcelain.clone,
- self.repo.path,
- target_path,
- checkout=True,
- bare=True,
- errstream=errstream,
- )
-
- def test_no_head_no_checkout(self):
- f1_1 = make_object(Blob, data=b"f1")
- commit_spec = [[1]]
- trees = {1: [(b"f1", f1_1), (b"f2", f1_1)]}
-
- (c1,) = build_commit_graph(self.repo.object_store, commit_spec, trees)
- self.repo.refs[b"refs/heads/master"] = c1.id
- target_path = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, target_path)
- errstream = BytesIO()
- r = porcelain.clone(
- self.repo.path, target_path, checkout=True, errstream=errstream
- )
- r.close()
-
- def test_no_head_no_checkout_outstream_errstream_autofallback(self):
- f1_1 = make_object(Blob, data=b"f1")
- commit_spec = [[1]]
- trees = {1: [(b"f1", f1_1), (b"f2", f1_1)]}
-
- (c1,) = build_commit_graph(self.repo.object_store, commit_spec, trees)
- self.repo.refs[b"refs/heads/master"] = c1.id
- target_path = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, target_path)
- errstream = porcelain.NoneStream()
- r = porcelain.clone(
- self.repo.path, target_path, checkout=True, errstream=errstream
- )
- r.close()
-
- def test_source_broken(self):
- with tempfile.TemporaryDirectory() as parent:
- target_path = os.path.join(parent, "target")
- self.assertRaises(
- Exception, porcelain.clone, "/nonexistent/repo", target_path
- )
- self.assertFalse(os.path.exists(target_path))
-
- def test_fetch_symref(self):
- f1_1 = make_object(Blob, data=b"f1")
- trees = {1: [(b"f1", f1_1), (b"f2", f1_1)]}
- [c1] = build_commit_graph(self.repo.object_store, [[1]], trees)
- self.repo.refs.set_symbolic_ref(b"HEAD", b"refs/heads/else")
- self.repo.refs[b"refs/heads/else"] = c1.id
- target_path = tempfile.mkdtemp()
- errstream = BytesIO()
- self.addCleanup(shutil.rmtree, target_path)
- r = porcelain.clone(
- self.repo.path, target_path, checkout=False, errstream=errstream
- )
- self.addCleanup(r.close)
- self.assertEqual(r.path, target_path)
- target_repo = Repo(target_path)
- self.assertEqual(0, len(target_repo.open_index()))
- self.assertEqual(c1.id, target_repo.refs[b"refs/heads/else"])
- self.assertEqual(c1.id, target_repo.refs[b"HEAD"])
- self.assertEqual(
- {
- b"HEAD": b"refs/heads/else",
- b"refs/remotes/origin/HEAD": b"refs/remotes/origin/else",
- },
- target_repo.refs.get_symrefs(),
- )
-
- def test_detached_head(self):
- f1_1 = make_object(Blob, data=b"f1")
- commit_spec = [[1], [2, 1], [3, 1, 2]]
- trees = {
- 1: [(b"f1", f1_1), (b"f2", f1_1)],
- 2: [(b"f1", f1_1), (b"f2", f1_1)],
- 3: [(b"f1", f1_1), (b"f2", f1_1)],
- }
-
- c1, c2, c3 = build_commit_graph(self.repo.object_store, commit_spec, trees)
- self.repo.refs[b"refs/heads/master"] = c2.id
- self.repo.refs.remove_if_equals(b"HEAD", None)
- self.repo.refs[b"HEAD"] = c3.id
- target_path = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, target_path)
- errstream = porcelain.NoneStream()
- with porcelain.clone(
- self.repo.path, target_path, checkout=True, errstream=errstream
- ) as r:
- self.assertEqual(c3.id, r.refs[b"HEAD"])
-
-
-class InitTests(TestCase):
- def test_non_bare(self):
- repo_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, repo_dir)
- porcelain.init(repo_dir)
-
- def test_bare(self):
- repo_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, repo_dir)
- porcelain.init(repo_dir, bare=True)
-
-
-class AddTests(PorcelainTestCase):
- def test_add_default_paths(self):
- # create a file for initial commit
- fullpath = os.path.join(self.repo.path, "blah")
- with open(fullpath, "w") as f:
- f.write("\n")
- porcelain.add(repo=self.repo.path, paths=[fullpath])
- porcelain.commit(
- repo=self.repo.path,
- message=b"test",
- author=b"test <email>",
- committer=b"test <email>",
- )
-
- # Add a second test file and a file in a directory
- with open(os.path.join(self.repo.path, "foo"), "w") as f:
- f.write("\n")
- os.mkdir(os.path.join(self.repo.path, "adir"))
- with open(os.path.join(self.repo.path, "adir", "afile"), "w") as f:
- f.write("\n")
- cwd = os.getcwd()
- try:
- os.chdir(self.repo.path)
- self.assertEqual({"foo", "blah", "adir", ".git"}, set(os.listdir(".")))
- self.assertEqual(
- (["foo", os.path.join("adir", "afile")], set()),
- porcelain.add(self.repo.path),
- )
- finally:
- os.chdir(cwd)
-
- # Check that foo was added and nothing in .git was modified
- index = self.repo.open_index()
- self.assertEqual(sorted(index), [b"adir/afile", b"blah", b"foo"])
-
- def test_add_default_paths_subdir(self):
- os.mkdir(os.path.join(self.repo.path, "foo"))
- with open(os.path.join(self.repo.path, "blah"), "w") as f:
- f.write("\n")
- with open(os.path.join(self.repo.path, "foo", "blie"), "w") as f:
- f.write("\n")
-
- cwd = os.getcwd()
- try:
- os.chdir(os.path.join(self.repo.path, "foo"))
- porcelain.add(repo=self.repo.path)
- porcelain.commit(
- repo=self.repo.path,
- message=b"test",
- author=b"test <email>",
- committer=b"test <email>",
- )
- finally:
- os.chdir(cwd)
-
- index = self.repo.open_index()
- self.assertEqual(sorted(index), [b"foo/blie"])
-
- def test_add_file(self):
- fullpath = os.path.join(self.repo.path, "foo")
- with open(fullpath, "w") as f:
- f.write("BAR")
- porcelain.add(self.repo.path, paths=[fullpath])
- self.assertIn(b"foo", self.repo.open_index())
-
- def test_add_ignored(self):
- with open(os.path.join(self.repo.path, ".gitignore"), "w") as f:
- f.write("foo\nsubdir/")
- with open(os.path.join(self.repo.path, "foo"), "w") as f:
- f.write("BAR")
- with open(os.path.join(self.repo.path, "bar"), "w") as f:
- f.write("BAR")
- os.mkdir(os.path.join(self.repo.path, "subdir"))
- with open(os.path.join(self.repo.path, "subdir", "baz"), "w") as f:
- f.write("BAZ")
- (added, ignored) = porcelain.add(
- self.repo.path,
- paths=[
- os.path.join(self.repo.path, "foo"),
- os.path.join(self.repo.path, "bar"),
- os.path.join(self.repo.path, "subdir"),
- ],
- )
- self.assertIn(b"bar", self.repo.open_index())
- self.assertEqual({"bar"}, set(added))
- self.assertEqual({"foo", os.path.join("subdir", "")}, ignored)
-
- def test_add_file_absolute_path(self):
- # Absolute paths are (not yet) supported
- with open(os.path.join(self.repo.path, "foo"), "w") as f:
- f.write("BAR")
- porcelain.add(self.repo, paths=[os.path.join(self.repo.path, "foo")])
- self.assertIn(b"foo", self.repo.open_index())
-
- def test_add_not_in_repo(self):
- with open(os.path.join(self.test_dir, "foo"), "w") as f:
- f.write("BAR")
- self.assertRaises(
- ValueError,
- porcelain.add,
- self.repo,
- paths=[os.path.join(self.test_dir, "foo")],
- )
- self.assertRaises(
- (ValueError, FileNotFoundError),
- porcelain.add,
- self.repo,
- paths=["../foo"],
- )
- self.assertEqual([], list(self.repo.open_index()))
-
- def test_add_file_clrf_conversion(self):
- # Set the right configuration to the repo
- c = self.repo.get_config()
- c.set("core", "autocrlf", "input")
- c.write_to_path()
-
- # Add a file with CRLF line-ending
- fullpath = os.path.join(self.repo.path, "foo")
- with open(fullpath, "wb") as f:
- f.write(b"line1\r\nline2")
- porcelain.add(self.repo.path, paths=[fullpath])
-
- # The line-endings should have been converted to LF
- index = self.repo.open_index()
- self.assertIn(b"foo", index)
-
- entry = index[b"foo"]
- blob = self.repo[entry.sha]
- self.assertEqual(blob.data, b"line1\nline2")
-
-
-class RemoveTests(PorcelainTestCase):
- def test_remove_file(self):
- fullpath = os.path.join(self.repo.path, "foo")
- with open(fullpath, "w") as f:
- f.write("BAR")
- porcelain.add(self.repo.path, paths=[fullpath])
- porcelain.commit(
- repo=self.repo,
- message=b"test",
- author=b"test <email>",
- committer=b"test <email>",
- )
- self.assertTrue(os.path.exists(os.path.join(self.repo.path, "foo")))
- cwd = os.getcwd()
- try:
- os.chdir(self.repo.path)
- porcelain.remove(self.repo.path, paths=["foo"])
- finally:
- os.chdir(cwd)
- self.assertFalse(os.path.exists(os.path.join(self.repo.path, "foo")))
-
- def test_remove_file_staged(self):
- fullpath = os.path.join(self.repo.path, "foo")
- with open(fullpath, "w") as f:
- f.write("BAR")
- cwd = os.getcwd()
- try:
- os.chdir(self.repo.path)
- porcelain.add(self.repo.path, paths=[fullpath])
- self.assertRaises(Exception, porcelain.rm, self.repo.path, paths=["foo"])
- finally:
- os.chdir(cwd)
-
- def test_remove_file_removed_on_disk(self):
- fullpath = os.path.join(self.repo.path, "foo")
- with open(fullpath, "w") as f:
- f.write("BAR")
- porcelain.add(self.repo.path, paths=[fullpath])
- cwd = os.getcwd()
- try:
- os.chdir(self.repo.path)
- os.remove(fullpath)
- porcelain.remove(self.repo.path, paths=["foo"])
- finally:
- os.chdir(cwd)
- self.assertFalse(os.path.exists(os.path.join(self.repo.path, "foo")))
-
-
-class LogTests(PorcelainTestCase):
- def test_simple(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
- outstream = StringIO()
- porcelain.log(self.repo.path, outstream=outstream)
- self.assertEqual(3, outstream.getvalue().count("-" * 50))
-
- def test_max_entries(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
- outstream = StringIO()
- porcelain.log(self.repo.path, outstream=outstream, max_entries=1)
- self.assertEqual(1, outstream.getvalue().count("-" * 50))
-
-
-class ShowTests(PorcelainTestCase):
- def test_nolist(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
- outstream = StringIO()
- porcelain.show(self.repo.path, objects=c3.id, outstream=outstream)
- self.assertTrue(outstream.getvalue().startswith("-" * 50))
-
- def test_simple(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
- outstream = StringIO()
- porcelain.show(self.repo.path, objects=[c3.id], outstream=outstream)
- self.assertTrue(outstream.getvalue().startswith("-" * 50))
-
- def test_blob(self):
- b = Blob.from_string(b"The Foo\n")
- self.repo.object_store.add_object(b)
- outstream = StringIO()
- porcelain.show(self.repo.path, objects=[b.id], outstream=outstream)
- self.assertEqual(outstream.getvalue(), "The Foo\n")
-
- def test_commit_no_parent(self):
- a = Blob.from_string(b"The Foo\n")
- ta = Tree()
- ta.add(b"somename", 0o100644, a.id)
- ca = make_commit(tree=ta.id)
- self.repo.object_store.add_objects([(a, None), (ta, None), (ca, None)])
- outstream = StringIO()
- porcelain.show(self.repo.path, objects=[ca.id], outstream=outstream)
- self.assertMultiLineEqual(
- outstream.getvalue(),
- """\
---------------------------------------------------
-commit: 344da06c1bb85901270b3e8875c988a027ec087d
-Author: Test Author <test@nodomain.com>
-Committer: Test Committer <test@nodomain.com>
-Date: Fri Jan 01 2010 00:00:00 +0000
-
-Test message.
-
-diff --git a/somename b/somename
-new file mode 100644
-index 0000000..ea5c7bf
---- /dev/null
-+++ b/somename
-@@ -0,0 +1 @@
-+The Foo
-""",
- )
-
- def test_tag(self):
- a = Blob.from_string(b"The Foo\n")
- ta = Tree()
- ta.add(b"somename", 0o100644, a.id)
- ca = make_commit(tree=ta.id)
- self.repo.object_store.add_objects([(a, None), (ta, None), (ca, None)])
- porcelain.tag_create(
- self.repo.path,
- b"tryme",
- b"foo <foo@bar.com>",
- b"bar",
- annotated=True,
- objectish=ca.id,
- tag_time=1552854211,
- tag_timezone=0,
- )
- outstream = StringIO()
- porcelain.show(self.repo, objects=[b"refs/tags/tryme"], outstream=outstream)
- self.maxDiff = None
- self.assertMultiLineEqual(
- outstream.getvalue(),
- """\
-Tagger: foo <foo@bar.com>
-Date: Sun Mar 17 2019 20:23:31 +0000
-
-bar
-
---------------------------------------------------
-commit: 344da06c1bb85901270b3e8875c988a027ec087d
-Author: Test Author <test@nodomain.com>
-Committer: Test Committer <test@nodomain.com>
-Date: Fri Jan 01 2010 00:00:00 +0000
-
-Test message.
-
-diff --git a/somename b/somename
-new file mode 100644
-index 0000000..ea5c7bf
---- /dev/null
-+++ b/somename
-@@ -0,0 +1 @@
-+The Foo
-""",
- )
-
- def test_commit_with_change(self):
- a = Blob.from_string(b"The Foo\n")
- ta = Tree()
- ta.add(b"somename", 0o100644, a.id)
- ca = make_commit(tree=ta.id)
- b = Blob.from_string(b"The Bar\n")
- tb = Tree()
- tb.add(b"somename", 0o100644, b.id)
- cb = make_commit(tree=tb.id, parents=[ca.id])
- self.repo.object_store.add_objects(
- [
- (a, None),
- (b, None),
- (ta, None),
- (tb, None),
- (ca, None),
- (cb, None),
- ]
- )
- outstream = StringIO()
- porcelain.show(self.repo.path, objects=[cb.id], outstream=outstream)
- self.assertMultiLineEqual(
- outstream.getvalue(),
- """\
---------------------------------------------------
-commit: 2c6b6c9cb72c130956657e1fdae58e5b103744fa
-Author: Test Author <test@nodomain.com>
-Committer: Test Committer <test@nodomain.com>
-Date: Fri Jan 01 2010 00:00:00 +0000
-
-Test message.
-
-diff --git a/somename b/somename
-index ea5c7bf..fd38bcb 100644
---- a/somename
-+++ b/somename
-@@ -1 +1 @@
--The Foo
-+The Bar
-""",
- )
-
-
-class SymbolicRefTests(PorcelainTestCase):
- def test_set_wrong_symbolic_ref(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
-
- self.assertRaises(
- porcelain.Error, porcelain.symbolic_ref, self.repo.path, b"foobar"
- )
-
- def test_set_force_wrong_symbolic_ref(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
-
- porcelain.symbolic_ref(self.repo.path, b"force_foobar", force=True)
-
- # test if we actually changed the file
- with self.repo.get_named_file("HEAD") as f:
- new_ref = f.read()
- self.assertEqual(new_ref, b"ref: refs/heads/force_foobar\n")
-
- def test_set_symbolic_ref(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
-
- porcelain.symbolic_ref(self.repo.path, b"master")
-
- def test_set_symbolic_ref_other_than_master(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store,
- [[1], [2, 1], [3, 1, 2]],
- attrs=dict(refs="develop"),
- )
- self.repo.refs[b"HEAD"] = c3.id
- self.repo.refs[b"refs/heads/develop"] = c3.id
-
- porcelain.symbolic_ref(self.repo.path, b"develop")
-
- # test if we actually changed the file
- with self.repo.get_named_file("HEAD") as f:
- new_ref = f.read()
- self.assertEqual(new_ref, b"ref: refs/heads/develop\n")
-
-
-class DiffTreeTests(PorcelainTestCase):
- def test_empty(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
- outstream = BytesIO()
- porcelain.diff_tree(self.repo.path, c2.tree, c3.tree, outstream=outstream)
- self.assertEqual(outstream.getvalue(), b"")
-
-
-class CommitTreeTests(PorcelainTestCase):
- def test_simple(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- b = Blob()
- b.data = b"foo the bar"
- t = Tree()
- t.add(b"somename", 0o100644, b.id)
- self.repo.object_store.add_object(t)
- self.repo.object_store.add_object(b)
- sha = porcelain.commit_tree(
- self.repo.path,
- t.id,
- message=b"Withcommit.",
- author=b"Joe <joe@example.com>",
- committer=b"Jane <jane@example.com>",
- )
- self.assertIsInstance(sha, bytes)
- self.assertEqual(len(sha), 40)
-
-
-class RevListTests(PorcelainTestCase):
- def test_simple(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- outstream = BytesIO()
- porcelain.rev_list(self.repo.path, [c3.id], outstream=outstream)
- self.assertEqual(
- c3.id + b"\n" + c2.id + b"\n" + c1.id + b"\n", outstream.getvalue()
- )
-
-
-@skipIf(
- platform.python_implementation() == "PyPy" or sys.platform == "win32",
- "gpgme not easily available or supported on Windows and PyPy",
-)
-class TagCreateSignTests(PorcelainGpgTestCase):
- def test_default_key(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
- cfg = self.repo.get_config()
- cfg.set(("user",), "signingKey", PorcelainGpgTestCase.DEFAULT_KEY_ID)
- self.import_default_key()
-
- porcelain.tag_create(
- self.repo.path,
- b"tryme",
- b"foo <foo@bar.com>",
- b"bar",
- annotated=True,
- sign=True,
- )
-
- tags = self.repo.refs.as_dict(b"refs/tags")
- self.assertEqual(list(tags.keys()), [b"tryme"])
- tag = self.repo[b"refs/tags/tryme"]
- self.assertIsInstance(tag, Tag)
- self.assertEqual(b"foo <foo@bar.com>", tag.tagger)
- self.assertEqual(b"bar\n", tag.message)
- self.assertRecentTimestamp(tag.tag_time)
- tag = self.repo[b"refs/tags/tryme"]
- # GPG Signatures aren't deterministic, so we can't do a static assertion.
- tag.verify()
- tag.verify(keyids=[PorcelainGpgTestCase.DEFAULT_KEY_ID])
-
- self.import_non_default_key()
- self.assertRaises(
- gpg.errors.MissingSignatures,
- tag.verify,
- keyids=[PorcelainGpgTestCase.NON_DEFAULT_KEY_ID],
- )
-
- tag._chunked_text = [b"bad data", tag._signature]
- self.assertRaises(
- gpg.errors.BadSignatures,
- tag.verify,
- )
-
- def test_non_default_key(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
- cfg = self.repo.get_config()
- cfg.set(("user",), "signingKey", PorcelainGpgTestCase.DEFAULT_KEY_ID)
- self.import_non_default_key()
-
- porcelain.tag_create(
- self.repo.path,
- b"tryme",
- b"foo <foo@bar.com>",
- b"bar",
- annotated=True,
- sign=PorcelainGpgTestCase.NON_DEFAULT_KEY_ID,
- )
-
- tags = self.repo.refs.as_dict(b"refs/tags")
- self.assertEqual(list(tags.keys()), [b"tryme"])
- tag = self.repo[b"refs/tags/tryme"]
- self.assertIsInstance(tag, Tag)
- self.assertEqual(b"foo <foo@bar.com>", tag.tagger)
- self.assertEqual(b"bar\n", tag.message)
- self.assertRecentTimestamp(tag.tag_time)
- tag = self.repo[b"refs/tags/tryme"]
- # GPG Signatures aren't deterministic, so we can't do a static assertion.
- tag.verify()
-
-
-class TagCreateTests(PorcelainTestCase):
- def test_annotated(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
-
- porcelain.tag_create(
- self.repo.path,
- b"tryme",
- b"foo <foo@bar.com>",
- b"bar",
- annotated=True,
- )
-
- tags = self.repo.refs.as_dict(b"refs/tags")
- self.assertEqual(list(tags.keys()), [b"tryme"])
- tag = self.repo[b"refs/tags/tryme"]
- self.assertIsInstance(tag, Tag)
- self.assertEqual(b"foo <foo@bar.com>", tag.tagger)
- self.assertEqual(b"bar\n", tag.message)
- self.assertRecentTimestamp(tag.tag_time)
-
- def test_unannotated(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
-
- porcelain.tag_create(self.repo.path, b"tryme", annotated=False)
-
- tags = self.repo.refs.as_dict(b"refs/tags")
- self.assertEqual(list(tags.keys()), [b"tryme"])
- self.repo[b"refs/tags/tryme"]
- self.assertEqual(list(tags.values()), [self.repo.head()])
-
- def test_unannotated_unicode(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
-
- porcelain.tag_create(self.repo.path, "tryme", annotated=False)
-
- tags = self.repo.refs.as_dict(b"refs/tags")
- self.assertEqual(list(tags.keys()), [b"tryme"])
- self.repo[b"refs/tags/tryme"]
- self.assertEqual(list(tags.values()), [self.repo.head()])
-
-
-class TagListTests(PorcelainTestCase):
- def test_empty(self):
- tags = porcelain.tag_list(self.repo.path)
- self.assertEqual([], tags)
-
- def test_simple(self):
- self.repo.refs[b"refs/tags/foo"] = b"aa" * 20
- self.repo.refs[b"refs/tags/bar/bla"] = b"bb" * 20
- tags = porcelain.tag_list(self.repo.path)
-
- self.assertEqual([b"bar/bla", b"foo"], tags)
-
-
-class TagDeleteTests(PorcelainTestCase):
- def test_simple(self):
- [c1] = build_commit_graph(self.repo.object_store, [[1]])
- self.repo[b"HEAD"] = c1.id
- porcelain.tag_create(self.repo, b"foo")
- self.assertIn(b"foo", porcelain.tag_list(self.repo))
- porcelain.tag_delete(self.repo, b"foo")
- self.assertNotIn(b"foo", porcelain.tag_list(self.repo))
-
-
-class ResetTests(PorcelainTestCase):
- def test_hard_head(self):
- fullpath = os.path.join(self.repo.path, "foo")
- with open(fullpath, "w") as f:
- f.write("BAR")
- porcelain.add(self.repo.path, paths=[fullpath])
- porcelain.commit(
- self.repo.path,
- message=b"Some message",
- committer=b"Jane <jane@example.com>",
- author=b"John <john@example.com>",
- )
-
- with open(os.path.join(self.repo.path, "foo"), "wb") as f:
- f.write(b"OOH")
-
- porcelain.reset(self.repo, "hard", b"HEAD")
-
- index = self.repo.open_index()
- changes = list(
- tree_changes(
- self.repo,
- index.commit(self.repo.object_store),
- self.repo[b"HEAD"].tree,
- )
- )
-
- self.assertEqual([], changes)
-
- def test_hard_commit(self):
- fullpath = os.path.join(self.repo.path, "foo")
- with open(fullpath, "w") as f:
- f.write("BAR")
- porcelain.add(self.repo.path, paths=[fullpath])
- sha = porcelain.commit(
- self.repo.path,
- message=b"Some message",
- committer=b"Jane <jane@example.com>",
- author=b"John <john@example.com>",
- )
-
- with open(fullpath, "wb") as f:
- f.write(b"BAZ")
- porcelain.add(self.repo.path, paths=[fullpath])
- porcelain.commit(
- self.repo.path,
- message=b"Some other message",
- committer=b"Jane <jane@example.com>",
- author=b"John <john@example.com>",
- )
-
- porcelain.reset(self.repo, "hard", sha)
-
- index = self.repo.open_index()
- changes = list(
- tree_changes(
- self.repo,
- index.commit(self.repo.object_store),
- self.repo[sha].tree,
- )
- )
-
- self.assertEqual([], changes)
-
-
-class ResetFileTests(PorcelainTestCase):
- def test_reset_modify_file_to_commit(self):
- file = "foo"
- full_path = os.path.join(self.repo.path, file)
-
- with open(full_path, "w") as f:
- f.write("hello")
- porcelain.add(self.repo, paths=[full_path])
- sha = porcelain.commit(
- self.repo,
- message=b"unitest",
- committer=b"Jane <jane@example.com>",
- author=b"John <john@example.com>",
- )
- with open(full_path, "a") as f:
- f.write("something new")
- porcelain.reset_file(self.repo, file, target=sha)
-
- with open(full_path) as f:
- self.assertEqual("hello", f.read())
-
- def test_reset_remove_file_to_commit(self):
- file = "foo"
- full_path = os.path.join(self.repo.path, file)
-
- with open(full_path, "w") as f:
- f.write("hello")
- porcelain.add(self.repo, paths=[full_path])
- sha = porcelain.commit(
- self.repo,
- message=b"unitest",
- committer=b"Jane <jane@example.com>",
- author=b"John <john@example.com>",
- )
- os.remove(full_path)
- porcelain.reset_file(self.repo, file, target=sha)
-
- with open(full_path) as f:
- self.assertEqual("hello", f.read())
-
- def test_resetfile_with_dir(self):
- os.mkdir(os.path.join(self.repo.path, "new_dir"))
- full_path = os.path.join(self.repo.path, "new_dir", "foo")
-
- with open(full_path, "w") as f:
- f.write("hello")
- porcelain.add(self.repo, paths=[full_path])
- sha = porcelain.commit(
- self.repo,
- message=b"unitest",
- committer=b"Jane <jane@example.com>",
- author=b"John <john@example.com>",
- )
- with open(full_path, "a") as f:
- f.write("something new")
- porcelain.commit(
- self.repo,
- message=b"unitest 2",
- committer=b"Jane <jane@example.com>",
- author=b"John <john@example.com>",
- )
- porcelain.reset_file(self.repo, os.path.join("new_dir", "foo"), target=sha)
-
- with open(full_path) as f:
- self.assertEqual("hello", f.read())
-
-
-def _commit_file_with_content(repo, filename, content):
- file_path = os.path.join(repo.path, filename)
-
- with open(file_path, "w") as f:
- f.write(content)
- porcelain.add(repo, paths=[file_path])
- sha = porcelain.commit(
- repo,
- message=b"add " + filename.encode(),
- committer=b"Jane <jane@example.com>",
- author=b"John <john@example.com>",
- )
-
- return sha, file_path
-
-
-class CheckoutTests(PorcelainTestCase):
- def setUp(self):
- super().setUp()
- self._sha, self._foo_path = _commit_file_with_content(
- self.repo, "foo", "hello\n"
- )
- porcelain.branch_create(self.repo, "uni")
-
- def test_checkout_to_existing_branch(self):
- self.assertEqual(b"master", porcelain.active_branch(self.repo))
- porcelain.checkout_branch(self.repo, b"uni")
- self.assertEqual(b"uni", porcelain.active_branch(self.repo))
-
- def test_checkout_to_non_existing_branch(self):
- self.assertEqual(b"master", porcelain.active_branch(self.repo))
-
- with self.assertRaises(KeyError):
- porcelain.checkout_branch(self.repo, b"bob")
-
- self.assertEqual(b"master", porcelain.active_branch(self.repo))
-
- def test_checkout_to_branch_with_modified_files(self):
- with open(self._foo_path, "a") as f:
- f.write("new message\n")
- porcelain.add(self.repo, paths=[self._foo_path])
-
- status = list(porcelain.status(self.repo))
- self.assertEqual(
- [{"add": [], "delete": [], "modify": [b"foo"]}, [], []], status
- )
-
- # Both branches have file 'foo' checkout should be fine.
- porcelain.checkout_branch(self.repo, b"uni")
- self.assertEqual(b"uni", porcelain.active_branch(self.repo))
-
- status = list(porcelain.status(self.repo))
- self.assertEqual(
- [{"add": [], "delete": [], "modify": [b"foo"]}, [], []], status
- )
-
- def test_checkout_with_deleted_files(self):
- porcelain.remove(self.repo.path, [os.path.join(self.repo.path, "foo")])
- status = list(porcelain.status(self.repo))
- self.assertEqual(
- [{"add": [], "delete": [b"foo"], "modify": []}, [], []], status
- )
-
- # Both branches have file 'foo' checkout should be fine.
- porcelain.checkout_branch(self.repo, b"uni")
- self.assertEqual(b"uni", porcelain.active_branch(self.repo))
-
- status = list(porcelain.status(self.repo))
- self.assertEqual(
- [{"add": [], "delete": [b"foo"], "modify": []}, [], []], status
- )
-
- def test_checkout_to_branch_with_added_files(self):
- file_path = os.path.join(self.repo.path, "bar")
-
- with open(file_path, "w") as f:
- f.write("bar content\n")
- porcelain.add(self.repo, paths=[file_path])
- status = list(porcelain.status(self.repo))
- self.assertEqual(
- [{"add": [b"bar"], "delete": [], "modify": []}, [], []], status
- )
-
- # Both branches have file 'foo' checkout should be fine.
- porcelain.checkout_branch(self.repo, b"uni")
- self.assertEqual(b"uni", porcelain.active_branch(self.repo))
-
- status = list(porcelain.status(self.repo))
- self.assertEqual(
- [{"add": [b"bar"], "delete": [], "modify": []}, [], []], status
- )
-
- def test_checkout_to_branch_with_modified_file_not_present(self):
- # Commit a new file that the other branch doesn't have.
- _, nee_path = _commit_file_with_content(self.repo, "nee", "Good content\n")
-
- # Modify the file the other branch doesn't have.
- with open(nee_path, "a") as f:
- f.write("bar content\n")
- porcelain.add(self.repo, paths=[nee_path])
- status = list(porcelain.status(self.repo))
- self.assertEqual(
- [{"add": [], "delete": [], "modify": [b"nee"]}, [], []], status
- )
-
- # 'uni' branch doesn't have 'nee' and it has been modified, should result in the checkout being aborted.
- with self.assertRaises(CheckoutError):
- porcelain.checkout_branch(self.repo, b"uni")
-
- self.assertEqual(b"master", porcelain.active_branch(self.repo))
-
- status = list(porcelain.status(self.repo))
- self.assertEqual(
- [{"add": [], "delete": [], "modify": [b"nee"]}, [], []], status
- )
-
- def test_checkout_to_branch_with_modified_file_not_present_forced(self):
- # Commit a new file that the other branch doesn't have.
- _, nee_path = _commit_file_with_content(self.repo, "nee", "Good content\n")
-
- # Modify the file the other branch doesn't have.
- with open(nee_path, "a") as f:
- f.write("bar content\n")
- porcelain.add(self.repo, paths=[nee_path])
- status = list(porcelain.status(self.repo))
- self.assertEqual(
- [{"add": [], "delete": [], "modify": [b"nee"]}, [], []], status
- )
-
- # 'uni' branch doesn't have 'nee' and it has been modified, but we force to reset the entire index.
- porcelain.checkout_branch(self.repo, b"uni", force=True)
-
- self.assertEqual(b"uni", porcelain.active_branch(self.repo))
-
- status = list(porcelain.status(self.repo))
- self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)
-
- def test_checkout_to_branch_with_unstaged_files(self):
- # Edit `foo`.
- with open(self._foo_path, "a") as f:
- f.write("new message")
-
- status = list(porcelain.status(self.repo))
- self.assertEqual(
- [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status
- )
-
- porcelain.checkout_branch(self.repo, b"uni")
-
- status = list(porcelain.status(self.repo))
- self.assertEqual(
- [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status
- )
-
- def test_checkout_to_branch_with_untracked_files(self):
- with open(os.path.join(self.repo.path, "neu"), "a") as f:
- f.write("new message\n")
-
- status = list(porcelain.status(self.repo))
- self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["neu"]], status)
-
- porcelain.checkout_branch(self.repo, b"uni")
-
- status = list(porcelain.status(self.repo))
- self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["neu"]], status)
-
- def test_checkout_to_branch_with_new_files(self):
- porcelain.checkout_branch(self.repo, b"uni")
- sub_directory = os.path.join(self.repo.path, "sub1")
- os.mkdir(sub_directory)
- for index in range(5):
- _commit_file_with_content(
- self.repo, "new_file_" + str(index + 1), "Some content\n"
- )
- _commit_file_with_content(
- self.repo,
- os.path.join("sub1", "new_file_" + str(index + 10)),
- "Good content\n",
- )
-
- status = list(porcelain.status(self.repo))
- self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)
-
- porcelain.checkout_branch(self.repo, b"master")
- self.assertEqual(b"master", porcelain.active_branch(self.repo))
- status = list(porcelain.status(self.repo))
- self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)
-
- porcelain.checkout_branch(self.repo, b"uni")
- self.assertEqual(b"uni", porcelain.active_branch(self.repo))
- status = list(porcelain.status(self.repo))
- self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)
-
- def test_checkout_to_branch_with_file_in_sub_directory(self):
- sub_directory = os.path.join(self.repo.path, "sub1", "sub2")
- os.makedirs(sub_directory)
-
- sub_directory_file = os.path.join(sub_directory, "neu")
- with open(sub_directory_file, "w") as f:
- f.write("new message\n")
-
- porcelain.add(self.repo, paths=[sub_directory_file])
- porcelain.commit(
- self.repo,
- message=b"add " + sub_directory_file.encode(),
- committer=b"Jane <jane@example.com>",
- author=b"John <john@example.com>",
- )
- status = list(porcelain.status(self.repo))
- self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)
-
- self.assertTrue(os.path.isdir(sub_directory))
- self.assertTrue(os.path.isdir(os.path.dirname(sub_directory)))
-
- porcelain.checkout_branch(self.repo, b"uni")
-
- status = list(porcelain.status(self.repo))
- self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)
-
- self.assertFalse(os.path.isdir(sub_directory))
- self.assertFalse(os.path.isdir(os.path.dirname(sub_directory)))
-
- porcelain.checkout_branch(self.repo, b"master")
-
- self.assertTrue(os.path.isdir(sub_directory))
- self.assertTrue(os.path.isdir(os.path.dirname(sub_directory)))
-
- def test_checkout_to_branch_with_multiple_files_in_sub_directory(self):
- sub_directory = os.path.join(self.repo.path, "sub1", "sub2")
- os.makedirs(sub_directory)
-
- sub_directory_file_1 = os.path.join(sub_directory, "neu")
- with open(sub_directory_file_1, "w") as f:
- f.write("new message\n")
-
- sub_directory_file_2 = os.path.join(sub_directory, "gus")
- with open(sub_directory_file_2, "w") as f:
- f.write("alternative message\n")
-
- porcelain.add(self.repo, paths=[sub_directory_file_1, sub_directory_file_2])
- porcelain.commit(
- self.repo,
- message=b"add files neu and gus.",
- committer=b"Jane <jane@example.com>",
- author=b"John <john@example.com>",
- )
- status = list(porcelain.status(self.repo))
- self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)
-
- self.assertTrue(os.path.isdir(sub_directory))
- self.assertTrue(os.path.isdir(os.path.dirname(sub_directory)))
-
- porcelain.checkout_branch(self.repo, b"uni")
-
- status = list(porcelain.status(self.repo))
- self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)
-
- self.assertFalse(os.path.isdir(sub_directory))
- self.assertFalse(os.path.isdir(os.path.dirname(sub_directory)))
-
- def _commit_something_wrong(self):
- with open(self._foo_path, "a") as f:
- f.write("something wrong")
-
- porcelain.add(self.repo, paths=[self._foo_path])
- return porcelain.commit(
- self.repo,
- message=b"I may added something wrong",
- committer=b"Jane <jane@example.com>",
- author=b"John <john@example.com>",
- )
-
- def test_checkout_to_commit_sha(self):
- self._commit_something_wrong()
-
- porcelain.checkout_branch(self.repo, self._sha)
- self.assertEqual(self._sha, self.repo.head())
-
- def test_checkout_to_head(self):
- new_sha = self._commit_something_wrong()
-
- porcelain.checkout_branch(self.repo, b"HEAD")
- self.assertEqual(new_sha, self.repo.head())
-
- def _checkout_remote_branch(self):
- errstream = BytesIO()
- outstream = BytesIO()
-
- porcelain.commit(
- repo=self.repo.path,
- message=b"init",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- # Setup target repo cloned from temp test repo
- clone_path = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, clone_path)
- target_repo = porcelain.clone(
- self.repo.path, target=clone_path, errstream=errstream
- )
- try:
- self.assertEqual(target_repo[b"HEAD"], self.repo[b"HEAD"])
- finally:
- target_repo.close()
-
- # create a second file to be pushed back to origin
- handle, fullpath = tempfile.mkstemp(dir=clone_path)
- os.close(handle)
- porcelain.add(repo=clone_path, paths=[fullpath])
- porcelain.commit(
- repo=clone_path,
- message=b"push",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- # Setup a non-checked out branch in the remote
- refs_path = b"refs/heads/foo"
- new_id = self.repo[b"HEAD"].id
- self.assertNotEqual(new_id, ZERO_SHA)
- self.repo.refs[refs_path] = new_id
-
- # Push to the remote
- porcelain.push(
- clone_path,
- "origin",
- b"HEAD:" + refs_path,
- outstream=outstream,
- errstream=errstream,
- )
-
- self.assertEqual(
- target_repo.refs[b"refs/remotes/origin/foo"],
- target_repo.refs[b"HEAD"],
- )
-
- porcelain.checkout_branch(target_repo, b"origin/foo")
- original_id = target_repo[b"HEAD"].id
- uni_id = target_repo[b"refs/remotes/origin/uni"].id
-
- expected_refs = {
- b"HEAD": original_id,
- b"refs/heads/master": original_id,
- b"refs/heads/foo": original_id,
- b"refs/remotes/origin/foo": original_id,
- b"refs/remotes/origin/uni": uni_id,
- b"refs/remotes/origin/HEAD": new_id,
- b"refs/remotes/origin/master": new_id,
- }
- self.assertEqual(expected_refs, target_repo.get_refs())
-
- return target_repo
-
- def test_checkout_remote_branch(self):
- repo = self._checkout_remote_branch()
- repo.close()
-
- def test_checkout_remote_branch_then_master_then_remote_branch_again(self):
- target_repo = self._checkout_remote_branch()
- self.assertEqual(b"foo", porcelain.active_branch(target_repo))
- _commit_file_with_content(target_repo, "bar", "something\n")
- self.assertTrue(os.path.isfile(os.path.join(target_repo.path, "bar")))
-
- porcelain.checkout_branch(target_repo, b"master")
-
- self.assertEqual(b"master", porcelain.active_branch(target_repo))
- self.assertFalse(os.path.isfile(os.path.join(target_repo.path, "bar")))
-
- porcelain.checkout_branch(target_repo, b"origin/foo")
-
- self.assertEqual(b"foo", porcelain.active_branch(target_repo))
- self.assertTrue(os.path.isfile(os.path.join(target_repo.path, "bar")))
-
- target_repo.close()
-
-
-class SubmoduleTests(PorcelainTestCase):
- def test_empty(self):
- porcelain.commit(
- repo=self.repo.path,
- message=b"init",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- self.assertEqual([], list(porcelain.submodule_list(self.repo)))
-
- def test_add(self):
- porcelain.submodule_add(self.repo, "../bar.git", "bar")
- with open("%s/.gitmodules" % self.repo.path) as f:
- self.assertEqual(
- """\
-[submodule "bar"]
-\turl = ../bar.git
-\tpath = bar
-""",
- f.read(),
- )
-
- def test_init(self):
- porcelain.submodule_add(self.repo, "../bar.git", "bar")
- porcelain.submodule_init(self.repo)
-
-
-class PushTests(PorcelainTestCase):
- def test_simple(self):
- """Basic test of porcelain push where self.repo is the remote. First
- clone the remote, commit a file to the clone, then push the changes
- back to the remote.
- """
- outstream = BytesIO()
- errstream = BytesIO()
-
- porcelain.commit(
- repo=self.repo.path,
- message=b"init",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- # Setup target repo cloned from temp test repo
- clone_path = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, clone_path)
- target_repo = porcelain.clone(
- self.repo.path, target=clone_path, errstream=errstream
- )
- try:
- self.assertEqual(target_repo[b"HEAD"], self.repo[b"HEAD"])
- finally:
- target_repo.close()
-
- # create a second file to be pushed back to origin
- handle, fullpath = tempfile.mkstemp(dir=clone_path)
- os.close(handle)
- porcelain.add(repo=clone_path, paths=[fullpath])
- porcelain.commit(
- repo=clone_path,
- message=b"push",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- # Setup a non-checked out branch in the remote
- refs_path = b"refs/heads/foo"
- new_id = self.repo[b"HEAD"].id
- self.assertNotEqual(new_id, ZERO_SHA)
- self.repo.refs[refs_path] = new_id
-
- # Push to the remote
- porcelain.push(
- clone_path,
- "origin",
- b"HEAD:" + refs_path,
- outstream=outstream,
- errstream=errstream,
- )
-
- self.assertEqual(
- target_repo.refs[b"refs/remotes/origin/foo"],
- target_repo.refs[b"HEAD"],
- )
-
- # Check that the target and source
- with Repo(clone_path) as r_clone:
- self.assertEqual(
- {
- b"HEAD": new_id,
- b"refs/heads/foo": r_clone[b"HEAD"].id,
- b"refs/heads/master": new_id,
- },
- self.repo.get_refs(),
- )
- self.assertEqual(r_clone[b"HEAD"].id, self.repo[refs_path].id)
-
- # Get the change in the target repo corresponding to the add
- # this will be in the foo branch.
- change = next(
- iter(
- tree_changes(
- self.repo,
- self.repo[b"HEAD"].tree,
- self.repo[b"refs/heads/foo"].tree,
- )
- )
- )
- self.assertEqual(
- os.path.basename(fullpath), change.new.path.decode("ascii")
- )
-
- def test_local_missing(self):
- """Pushing a new branch."""
- outstream = BytesIO()
- errstream = BytesIO()
-
- # Setup target repo cloned from temp test repo
- clone_path = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, clone_path)
- target_repo = porcelain.init(clone_path)
- target_repo.close()
-
- self.assertRaises(
- porcelain.Error,
- porcelain.push,
- self.repo,
- clone_path,
- b"HEAD:refs/heads/master",
- outstream=outstream,
- errstream=errstream,
- )
-
- def test_new(self):
- """Pushing a new branch."""
- outstream = BytesIO()
- errstream = BytesIO()
-
- # Setup target repo cloned from temp test repo
- clone_path = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, clone_path)
- target_repo = porcelain.init(clone_path)
- target_repo.close()
-
- # create a second file to be pushed back to origin
- handle, fullpath = tempfile.mkstemp(dir=clone_path)
- os.close(handle)
- porcelain.add(repo=clone_path, paths=[fullpath])
- new_id = porcelain.commit(
- repo=self.repo,
- message=b"push",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- # Push to the remote
- porcelain.push(
- self.repo,
- clone_path,
- b"HEAD:refs/heads/master",
- outstream=outstream,
- errstream=errstream,
- )
-
- with Repo(clone_path) as r_clone:
- self.assertEqual(
- {
- b"HEAD": new_id,
- b"refs/heads/master": new_id,
- },
- r_clone.get_refs(),
- )
-
- def test_delete(self):
- """Basic test of porcelain push, removing a branch."""
- outstream = BytesIO()
- errstream = BytesIO()
-
- porcelain.commit(
- repo=self.repo.path,
- message=b"init",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- # Setup target repo cloned from temp test repo
- clone_path = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, clone_path)
- target_repo = porcelain.clone(
- self.repo.path, target=clone_path, errstream=errstream
- )
- target_repo.close()
-
- # Setup a non-checked out branch in the remote
- refs_path = b"refs/heads/foo"
- new_id = self.repo[b"HEAD"].id
- self.assertNotEqual(new_id, ZERO_SHA)
- self.repo.refs[refs_path] = new_id
-
- # Push to the remote
- porcelain.push(
- clone_path,
- self.repo.path,
- b":" + refs_path,
- outstream=outstream,
- errstream=errstream,
- )
-
- self.assertEqual(
- {
- b"HEAD": new_id,
- b"refs/heads/master": new_id,
- },
- self.repo.get_refs(),
- )
-
- def test_diverged(self):
- outstream = BytesIO()
- errstream = BytesIO()
-
- porcelain.commit(
- repo=self.repo.path,
- message=b"init",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- # Setup target repo cloned from temp test repo
- clone_path = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, clone_path)
- target_repo = porcelain.clone(
- self.repo.path, target=clone_path, errstream=errstream
- )
- target_repo.close()
-
- remote_id = porcelain.commit(
- repo=self.repo.path,
- message=b"remote change",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- local_id = porcelain.commit(
- repo=clone_path,
- message=b"local change",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- outstream = BytesIO()
- errstream = BytesIO()
-
- # Push to the remote
- self.assertRaises(
- porcelain.DivergedBranches,
- porcelain.push,
- clone_path,
- self.repo.path,
- b"refs/heads/master",
- outstream=outstream,
- errstream=errstream,
- )
-
- self.assertEqual(
- {
- b"HEAD": remote_id,
- b"refs/heads/master": remote_id,
- },
- self.repo.get_refs(),
- )
-
- self.assertEqual(b"", outstream.getvalue())
- self.assertEqual(b"", errstream.getvalue())
-
- outstream = BytesIO()
- errstream = BytesIO()
-
- # Push to the remote with --force
- porcelain.push(
- clone_path,
- self.repo.path,
- b"refs/heads/master",
- outstream=outstream,
- errstream=errstream,
- force=True,
- )
-
- self.assertEqual(
- {
- b"HEAD": local_id,
- b"refs/heads/master": local_id,
- },
- self.repo.get_refs(),
- )
-
- self.assertEqual(b"", outstream.getvalue())
- self.assertTrue(re.match(b"Push to .* successful.\n", errstream.getvalue()))
-
-
-class PullTests(PorcelainTestCase):
- def setUp(self):
- super().setUp()
- # create a file for initial commit
- handle, fullpath = tempfile.mkstemp(dir=self.repo.path)
- os.close(handle)
- porcelain.add(repo=self.repo.path, paths=fullpath)
- porcelain.commit(
- repo=self.repo.path,
- message=b"test",
- author=b"test <email>",
- committer=b"test <email>",
- )
-
- # Setup target repo
- self.target_path = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.target_path)
- target_repo = porcelain.clone(
- self.repo.path, target=self.target_path, errstream=BytesIO()
- )
- target_repo.close()
-
- # create a second file to be pushed
- handle, fullpath = tempfile.mkstemp(dir=self.repo.path)
- os.close(handle)
- porcelain.add(repo=self.repo.path, paths=fullpath)
- porcelain.commit(
- repo=self.repo.path,
- message=b"test2",
- author=b"test2 <email>",
- committer=b"test2 <email>",
- )
-
- self.assertIn(b"refs/heads/master", self.repo.refs)
- self.assertIn(b"refs/heads/master", target_repo.refs)
-
- def test_simple(self):
- outstream = BytesIO()
- errstream = BytesIO()
-
- # Pull changes into the cloned repo
- porcelain.pull(
- self.target_path,
- self.repo.path,
- b"refs/heads/master",
- outstream=outstream,
- errstream=errstream,
- )
-
- # Check the target repo for pushed changes
- with Repo(self.target_path) as r:
- self.assertEqual(r[b"HEAD"].id, self.repo[b"HEAD"].id)
-
- def test_diverged(self):
- outstream = BytesIO()
- errstream = BytesIO()
-
- c3a = porcelain.commit(
- repo=self.target_path,
- message=b"test3a",
- author=b"test2 <email>",
- committer=b"test2 <email>",
- )
-
- porcelain.commit(
- repo=self.repo.path,
- message=b"test3b",
- author=b"test2 <email>",
- committer=b"test2 <email>",
- )
-
- # Pull changes into the cloned repo
- self.assertRaises(
- porcelain.DivergedBranches,
- porcelain.pull,
- self.target_path,
- self.repo.path,
- b"refs/heads/master",
- outstream=outstream,
- errstream=errstream,
- )
-
- # Check the target repo for pushed changes
- with Repo(self.target_path) as r:
- self.assertEqual(r[b"refs/heads/master"].id, c3a)
-
- self.assertRaises(
- NotImplementedError,
- porcelain.pull,
- self.target_path,
- self.repo.path,
- b"refs/heads/master",
- outstream=outstream,
- errstream=errstream,
- fast_forward=False,
- )
-
- # Check the target repo for pushed changes
- with Repo(self.target_path) as r:
- self.assertEqual(r[b"refs/heads/master"].id, c3a)
-
- def test_no_refspec(self):
- outstream = BytesIO()
- errstream = BytesIO()
-
- # Pull changes into the cloned repo
- porcelain.pull(
- self.target_path,
- self.repo.path,
- outstream=outstream,
- errstream=errstream,
- )
-
- # Check the target repo for pushed changes
- with Repo(self.target_path) as r:
- self.assertEqual(r[b"HEAD"].id, self.repo[b"HEAD"].id)
-
- def test_no_remote_location(self):
- outstream = BytesIO()
- errstream = BytesIO()
-
- # Pull changes into the cloned repo
- porcelain.pull(
- self.target_path,
- refspecs=b"refs/heads/master",
- outstream=outstream,
- errstream=errstream,
- )
-
- # Check the target repo for pushed changes
- with Repo(self.target_path) as r:
- self.assertEqual(r[b"HEAD"].id, self.repo[b"HEAD"].id)
-
-
-class StatusTests(PorcelainTestCase):
- def test_empty(self):
- results = porcelain.status(self.repo)
- self.assertEqual({"add": [], "delete": [], "modify": []}, results.staged)
- self.assertEqual([], results.unstaged)
-
- def test_status_base(self):
- """Integration test for `status` functionality."""
- # Commit a dummy file then modify it
- fullpath = os.path.join(self.repo.path, "foo")
- with open(fullpath, "w") as f:
- f.write("origstuff")
-
- porcelain.add(repo=self.repo.path, paths=[fullpath])
- porcelain.commit(
- repo=self.repo.path,
- message=b"test status",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- # modify access and modify time of path
- os.utime(fullpath, (0, 0))
-
- with open(fullpath, "wb") as f:
- f.write(b"stuff")
-
- # Make a dummy file and stage it
- filename_add = "bar"
- fullpath = os.path.join(self.repo.path, filename_add)
- with open(fullpath, "w") as f:
- f.write("stuff")
- porcelain.add(repo=self.repo.path, paths=fullpath)
-
- results = porcelain.status(self.repo)
-
- self.assertEqual(results.staged["add"][0], filename_add.encode("ascii"))
- self.assertEqual(results.unstaged, [b"foo"])
-
- def test_status_all(self):
- del_path = os.path.join(self.repo.path, "foo")
- mod_path = os.path.join(self.repo.path, "bar")
- add_path = os.path.join(self.repo.path, "baz")
- us_path = os.path.join(self.repo.path, "blye")
- ut_path = os.path.join(self.repo.path, "blyat")
- with open(del_path, "w") as f:
- f.write("origstuff")
- with open(mod_path, "w") as f:
- f.write("origstuff")
- with open(us_path, "w") as f:
- f.write("origstuff")
- porcelain.add(repo=self.repo.path, paths=[del_path, mod_path, us_path])
- porcelain.commit(
- repo=self.repo.path,
- message=b"test status",
- author=b"author <email>",
- committer=b"committer <email>",
- )
- porcelain.remove(self.repo.path, [del_path])
- with open(add_path, "w") as f:
- f.write("origstuff")
- with open(mod_path, "w") as f:
- f.write("more_origstuff")
- with open(us_path, "w") as f:
- f.write("more_origstuff")
- porcelain.add(repo=self.repo.path, paths=[add_path, mod_path])
- with open(us_path, "w") as f:
- f.write("\norigstuff")
- with open(ut_path, "w") as f:
- f.write("origstuff")
- results = porcelain.status(self.repo.path)
- self.assertDictEqual(
- {"add": [b"baz"], "delete": [b"foo"], "modify": [b"bar"]},
- results.staged,
- )
- self.assertListEqual(results.unstaged, [b"blye"])
- results_no_untracked = porcelain.status(self.repo.path, untracked_files="no")
- self.assertListEqual(results_no_untracked.untracked, [])
-
- def test_status_wrong_untracked_files_value(self):
- with self.assertRaises(ValueError):
- porcelain.status(self.repo.path, untracked_files="antani")
-
- def test_status_untracked_path(self):
- untracked_dir = os.path.join(self.repo_path, "untracked_dir")
- os.mkdir(untracked_dir)
- untracked_file = os.path.join(untracked_dir, "untracked_file")
- with open(untracked_file, "w") as fh:
- fh.write("untracked")
-
- _, _, untracked = porcelain.status(self.repo.path, untracked_files="all")
- self.assertEqual(untracked, ["untracked_dir/untracked_file"])
-
- def test_status_crlf_mismatch(self):
- # First make a commit as if the file has been added on a Linux system
- # or with core.autocrlf=True
- file_path = os.path.join(self.repo.path, "crlf")
- with open(file_path, "wb") as f:
- f.write(b"line1\nline2")
- porcelain.add(repo=self.repo.path, paths=[file_path])
- porcelain.commit(
- repo=self.repo.path,
- message=b"test status",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- # Then update the file as if it was created by CGit on a Windows
- # system with core.autocrlf=true
- with open(file_path, "wb") as f:
- f.write(b"line1\r\nline2")
-
- results = porcelain.status(self.repo)
- self.assertDictEqual({"add": [], "delete": [], "modify": []}, results.staged)
- self.assertListEqual(results.unstaged, [b"crlf"])
- self.assertListEqual(results.untracked, [])
-
- def test_status_autocrlf_true(self):
- # First make a commit as if the file has been added on a Linux system
- # or with core.autocrlf=True
- file_path = os.path.join(self.repo.path, "crlf")
- with open(file_path, "wb") as f:
- f.write(b"line1\nline2")
- porcelain.add(repo=self.repo.path, paths=[file_path])
- porcelain.commit(
- repo=self.repo.path,
- message=b"test status",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- # Then update the file as if it was created by CGit on a Windows
- # system with core.autocrlf=true
- with open(file_path, "wb") as f:
- f.write(b"line1\r\nline2")
-
- # TODO: It should be set automatically by looking at the configuration
- c = self.repo.get_config()
- c.set("core", "autocrlf", True)
- c.write_to_path()
-
- results = porcelain.status(self.repo)
- self.assertDictEqual({"add": [], "delete": [], "modify": []}, results.staged)
- self.assertListEqual(results.unstaged, [])
- self.assertListEqual(results.untracked, [])
-
- def test_status_autocrlf_input(self):
- # Commit existing file with CRLF
- file_path = os.path.join(self.repo.path, "crlf-exists")
- with open(file_path, "wb") as f:
- f.write(b"line1\r\nline2")
- porcelain.add(repo=self.repo.path, paths=[file_path])
- porcelain.commit(
- repo=self.repo.path,
- message=b"test status",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- c = self.repo.get_config()
- c.set("core", "autocrlf", "input")
- c.write_to_path()
-
- # Add new (untracked) file
- file_path = os.path.join(self.repo.path, "crlf-new")
- with open(file_path, "wb") as f:
- f.write(b"line1\r\nline2")
- porcelain.add(repo=self.repo.path, paths=[file_path])
-
- results = porcelain.status(self.repo)
- self.assertDictEqual(
- {"add": [b"crlf-new"], "delete": [], "modify": []}, results.staged
- )
- self.assertListEqual(results.unstaged, [])
- self.assertListEqual(results.untracked, [])
-
- def test_get_tree_changes_add(self):
- """Unit test for get_tree_changes add."""
- # Make a dummy file, stage
- filename = "bar"
- fullpath = os.path.join(self.repo.path, filename)
- with open(fullpath, "w") as f:
- f.write("stuff")
- porcelain.add(repo=self.repo.path, paths=fullpath)
- porcelain.commit(
- repo=self.repo.path,
- message=b"test status",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- filename = "foo"
- fullpath = os.path.join(self.repo.path, filename)
- with open(fullpath, "w") as f:
- f.write("stuff")
- porcelain.add(repo=self.repo.path, paths=fullpath)
- changes = porcelain.get_tree_changes(self.repo.path)
-
- self.assertEqual(changes["add"][0], filename.encode("ascii"))
- self.assertEqual(len(changes["add"]), 1)
- self.assertEqual(len(changes["modify"]), 0)
- self.assertEqual(len(changes["delete"]), 0)
-
- def test_get_tree_changes_modify(self):
- """Unit test for get_tree_changes modify."""
- # Make a dummy file, stage, commit, modify
- filename = "foo"
- fullpath = os.path.join(self.repo.path, filename)
- with open(fullpath, "w") as f:
- f.write("stuff")
- porcelain.add(repo=self.repo.path, paths=fullpath)
- porcelain.commit(
- repo=self.repo.path,
- message=b"test status",
- author=b"author <email>",
- committer=b"committer <email>",
- )
- with open(fullpath, "w") as f:
- f.write("otherstuff")
- porcelain.add(repo=self.repo.path, paths=fullpath)
- changes = porcelain.get_tree_changes(self.repo.path)
-
- self.assertEqual(changes["modify"][0], filename.encode("ascii"))
- self.assertEqual(len(changes["add"]), 0)
- self.assertEqual(len(changes["modify"]), 1)
- self.assertEqual(len(changes["delete"]), 0)
-
- def test_get_tree_changes_delete(self):
- """Unit test for get_tree_changes delete."""
- # Make a dummy file, stage, commit, remove
- filename = "foo"
- fullpath = os.path.join(self.repo.path, filename)
- with open(fullpath, "w") as f:
- f.write("stuff")
- porcelain.add(repo=self.repo.path, paths=fullpath)
- porcelain.commit(
- repo=self.repo.path,
- message=b"test status",
- author=b"author <email>",
- committer=b"committer <email>",
- )
- cwd = os.getcwd()
- try:
- os.chdir(self.repo.path)
- porcelain.remove(repo=self.repo.path, paths=[filename])
- finally:
- os.chdir(cwd)
- changes = porcelain.get_tree_changes(self.repo.path)
-
- self.assertEqual(changes["delete"][0], filename.encode("ascii"))
- self.assertEqual(len(changes["add"]), 0)
- self.assertEqual(len(changes["modify"]), 0)
- self.assertEqual(len(changes["delete"]), 1)
-
- def test_get_untracked_paths(self):
- with open(os.path.join(self.repo.path, ".gitignore"), "w") as f:
- f.write("ignored\n")
- with open(os.path.join(self.repo.path, "ignored"), "w") as f:
- f.write("blah\n")
- with open(os.path.join(self.repo.path, "notignored"), "w") as f:
- f.write("blah\n")
- os.symlink(
- os.path.join(self.repo.path, os.pardir, "external_target"),
- os.path.join(self.repo.path, "link"),
- )
- self.assertEqual(
- {"ignored", "notignored", ".gitignore", "link"},
- set(
- porcelain.get_untracked_paths(
- self.repo.path, self.repo.path, self.repo.open_index()
- )
- ),
- )
- self.assertEqual(
- {".gitignore", "notignored", "link"},
- set(porcelain.status(self.repo).untracked),
- )
- self.assertEqual(
- {".gitignore", "notignored", "ignored", "link"},
- set(porcelain.status(self.repo, ignored=True).untracked),
- )
-
- def test_get_untracked_paths_subrepo(self):
- with open(os.path.join(self.repo.path, ".gitignore"), "w") as f:
- f.write("nested/\n")
- with open(os.path.join(self.repo.path, "notignored"), "w") as f:
- f.write("blah\n")
-
- subrepo = Repo.init(os.path.join(self.repo.path, "nested"), mkdir=True)
- with open(os.path.join(subrepo.path, "ignored"), "w") as f:
- f.write("bleep\n")
- with open(os.path.join(subrepo.path, "with"), "w") as f:
- f.write("bloop\n")
- with open(os.path.join(subrepo.path, "manager"), "w") as f:
- f.write("blop\n")
-
- self.assertEqual(
- {".gitignore", "notignored", os.path.join("nested", "")},
- set(
- porcelain.get_untracked_paths(
- self.repo.path, self.repo.path, self.repo.open_index()
- )
- ),
- )
- self.assertEqual(
- {".gitignore", "notignored"},
- set(
- porcelain.get_untracked_paths(
- self.repo.path,
- self.repo.path,
- self.repo.open_index(),
- exclude_ignored=True,
- )
- ),
- )
- self.assertEqual(
- {"ignored", "with", "manager"},
- set(
- porcelain.get_untracked_paths(
- subrepo.path, subrepo.path, subrepo.open_index()
- )
- ),
- )
- self.assertEqual(
- set(),
- set(
- porcelain.get_untracked_paths(
- subrepo.path,
- self.repo.path,
- self.repo.open_index(),
- )
- ),
- )
- self.assertEqual(
- {
- os.path.join("nested", "ignored"),
- os.path.join("nested", "with"),
- os.path.join("nested", "manager"),
- },
- set(
- porcelain.get_untracked_paths(
- self.repo.path,
- subrepo.path,
- self.repo.open_index(),
- )
- ),
- )
-
- def test_get_untracked_paths_subdir(self):
- with open(os.path.join(self.repo.path, ".gitignore"), "w") as f:
- f.write("subdir/\nignored")
- with open(os.path.join(self.repo.path, "notignored"), "w") as f:
- f.write("blah\n")
- os.mkdir(os.path.join(self.repo.path, "subdir"))
- with open(os.path.join(self.repo.path, "ignored"), "w") as f:
- f.write("foo")
- with open(os.path.join(self.repo.path, "subdir", "ignored"), "w") as f:
- f.write("foo")
-
- self.assertEqual(
- {
- ".gitignore",
- "notignored",
- "ignored",
- os.path.join("subdir", ""),
- },
- set(
- porcelain.get_untracked_paths(
- self.repo.path,
- self.repo.path,
- self.repo.open_index(),
- )
- ),
- )
- self.assertEqual(
- {".gitignore", "notignored"},
- set(
- porcelain.get_untracked_paths(
- self.repo.path,
- self.repo.path,
- self.repo.open_index(),
- exclude_ignored=True,
- )
- ),
- )
-
- def test_get_untracked_paths_invalid_untracked_files(self):
- with self.assertRaises(ValueError):
- list(
- porcelain.get_untracked_paths(
- self.repo.path,
- self.repo.path,
- self.repo.open_index(),
- untracked_files="invalid_value",
- )
- )
-
- def test_get_untracked_paths_normal(self):
- with self.assertRaises(NotImplementedError):
- _, _, _ = porcelain.status(repo=self.repo.path, untracked_files="normal")
-
-
-# TODO(jelmer): Add test for dulwich.porcelain.daemon
-
-
-class UploadPackTests(PorcelainTestCase):
- """Tests for upload_pack."""
-
- def test_upload_pack(self):
- outf = BytesIO()
- exitcode = porcelain.upload_pack(self.repo.path, BytesIO(b"0000"), outf)
- outlines = outf.getvalue().splitlines()
- self.assertEqual([b"0000"], outlines)
- self.assertEqual(0, exitcode)
-
-
-class ReceivePackTests(PorcelainTestCase):
- """Tests for receive_pack."""
-
- def test_receive_pack(self):
- filename = "foo"
- fullpath = os.path.join(self.repo.path, filename)
- with open(fullpath, "w") as f:
- f.write("stuff")
- porcelain.add(repo=self.repo.path, paths=fullpath)
- self.repo.do_commit(
- message=b"test status",
- author=b"author <email>",
- committer=b"committer <email>",
- author_timestamp=1402354300,
- commit_timestamp=1402354300,
- author_timezone=0,
- commit_timezone=0,
- )
- outf = BytesIO()
- exitcode = porcelain.receive_pack(self.repo.path, BytesIO(b"0000"), outf)
- outlines = outf.getvalue().splitlines()
- self.assertEqual(
- [
- b"0091319b56ce3aee2d489f759736a79cc552c9bb86d9 HEAD\x00 report-status "
- b"delete-refs quiet ofs-delta side-band-64k "
- b"no-done symref=HEAD:refs/heads/master",
- b"003f319b56ce3aee2d489f759736a79cc552c9bb86d9 refs/heads/master",
- b"0000",
- ],
- outlines,
- )
- self.assertEqual(0, exitcode)
-
-
-class BranchListTests(PorcelainTestCase):
- def test_standard(self):
- self.assertEqual(set(), set(porcelain.branch_list(self.repo)))
-
- def test_new_branch(self):
- [c1] = build_commit_graph(self.repo.object_store, [[1]])
- self.repo[b"HEAD"] = c1.id
- porcelain.branch_create(self.repo, b"foo")
- self.assertEqual({b"master", b"foo"}, set(porcelain.branch_list(self.repo)))
-
-
-class BranchCreateTests(PorcelainTestCase):
- def test_branch_exists(self):
- [c1] = build_commit_graph(self.repo.object_store, [[1]])
- self.repo[b"HEAD"] = c1.id
- porcelain.branch_create(self.repo, b"foo")
- self.assertRaises(porcelain.Error, porcelain.branch_create, self.repo, b"foo")
- porcelain.branch_create(self.repo, b"foo", force=True)
-
- def test_new_branch(self):
- [c1] = build_commit_graph(self.repo.object_store, [[1]])
- self.repo[b"HEAD"] = c1.id
- porcelain.branch_create(self.repo, b"foo")
- self.assertEqual({b"master", b"foo"}, set(porcelain.branch_list(self.repo)))
-
-
-class BranchDeleteTests(PorcelainTestCase):
- def test_simple(self):
- [c1] = build_commit_graph(self.repo.object_store, [[1]])
- self.repo[b"HEAD"] = c1.id
- porcelain.branch_create(self.repo, b"foo")
- self.assertIn(b"foo", porcelain.branch_list(self.repo))
- porcelain.branch_delete(self.repo, b"foo")
- self.assertNotIn(b"foo", porcelain.branch_list(self.repo))
-
- def test_simple_unicode(self):
- [c1] = build_commit_graph(self.repo.object_store, [[1]])
- self.repo[b"HEAD"] = c1.id
- porcelain.branch_create(self.repo, "foo")
- self.assertIn(b"foo", porcelain.branch_list(self.repo))
- porcelain.branch_delete(self.repo, "foo")
- self.assertNotIn(b"foo", porcelain.branch_list(self.repo))
-
-
-class FetchTests(PorcelainTestCase):
- def test_simple(self):
- outstream = BytesIO()
- errstream = BytesIO()
-
- # create a file for initial commit
- handle, fullpath = tempfile.mkstemp(dir=self.repo.path)
- os.close(handle)
- porcelain.add(repo=self.repo.path, paths=fullpath)
- porcelain.commit(
- repo=self.repo.path,
- message=b"test",
- author=b"test <email>",
- committer=b"test <email>",
- )
-
- # Setup target repo
- target_path = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, target_path)
- target_repo = porcelain.clone(
- self.repo.path, target=target_path, errstream=errstream
- )
-
- # create a second file to be pushed
- handle, fullpath = tempfile.mkstemp(dir=self.repo.path)
- os.close(handle)
- porcelain.add(repo=self.repo.path, paths=fullpath)
- porcelain.commit(
- repo=self.repo.path,
- message=b"test2",
- author=b"test2 <email>",
- committer=b"test2 <email>",
- )
-
- self.assertNotIn(self.repo[b"HEAD"].id, target_repo)
- target_repo.close()
-
- # Fetch changes into the cloned repo
- porcelain.fetch(target_path, "origin", outstream=outstream, errstream=errstream)
-
- # Assert that fetch updated the local image of the remote
- self.assert_correct_remote_refs(target_repo.get_refs(), self.repo.get_refs())
-
- # Check the target repo for pushed changes
- with Repo(target_path) as r:
- self.assertIn(self.repo[b"HEAD"].id, r)
-
- def test_with_remote_name(self):
- remote_name = "origin"
- outstream = BytesIO()
- errstream = BytesIO()
-
- # create a file for initial commit
- handle, fullpath = tempfile.mkstemp(dir=self.repo.path)
- os.close(handle)
- porcelain.add(repo=self.repo.path, paths=fullpath)
- porcelain.commit(
- repo=self.repo.path,
- message=b"test",
- author=b"test <email>",
- committer=b"test <email>",
- )
-
- # Setup target repo
- target_path = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, target_path)
- target_repo = porcelain.clone(
- self.repo.path, target=target_path, errstream=errstream
- )
-
- # Capture current refs
- target_refs = target_repo.get_refs()
-
- # create a second file to be pushed
- handle, fullpath = tempfile.mkstemp(dir=self.repo.path)
- os.close(handle)
- porcelain.add(repo=self.repo.path, paths=fullpath)
- porcelain.commit(
- repo=self.repo.path,
- message=b"test2",
- author=b"test2 <email>",
- committer=b"test2 <email>",
- )
-
- self.assertNotIn(self.repo[b"HEAD"].id, target_repo)
-
- target_config = target_repo.get_config()
- target_config.set(
- (b"remote", remote_name.encode()), b"url", self.repo.path.encode()
- )
- target_repo.close()
-
- # Fetch changes into the cloned repo
- porcelain.fetch(
- target_path, remote_name, outstream=outstream, errstream=errstream
- )
-
- # Assert that fetch updated the local image of the remote
- self.assert_correct_remote_refs(target_repo.get_refs(), self.repo.get_refs())
-
- # Check the target repo for pushed changes, as well as updates
- # for the refs
- with Repo(target_path) as r:
- self.assertIn(self.repo[b"HEAD"].id, r)
- self.assertNotEqual(self.repo.get_refs(), target_refs)
-
- def assert_correct_remote_refs(
- self, local_refs, remote_refs, remote_name=b"origin"
- ):
- """Assert that known remote refs corresponds to actual remote refs."""
- local_ref_prefix = b"refs/heads"
- remote_ref_prefix = b"refs/remotes/" + remote_name
-
- locally_known_remote_refs = {
- k[len(remote_ref_prefix) + 1 :]: v
- for k, v in local_refs.items()
- if k.startswith(remote_ref_prefix)
- }
-
- normalized_remote_refs = {
- k[len(local_ref_prefix) + 1 :]: v
- for k, v in remote_refs.items()
- if k.startswith(local_ref_prefix)
- }
- if b"HEAD" in locally_known_remote_refs and b"HEAD" in remote_refs:
- normalized_remote_refs[b"HEAD"] = remote_refs[b"HEAD"]
-
- self.assertEqual(locally_known_remote_refs, normalized_remote_refs)
-
-
-class RepackTests(PorcelainTestCase):
- def test_empty(self):
- porcelain.repack(self.repo)
-
- def test_simple(self):
- handle, fullpath = tempfile.mkstemp(dir=self.repo.path)
- os.close(handle)
- porcelain.add(repo=self.repo.path, paths=fullpath)
- porcelain.repack(self.repo)
-
-
-class LsTreeTests(PorcelainTestCase):
- def test_empty(self):
- porcelain.commit(
- repo=self.repo.path,
- message=b"test status",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- f = StringIO()
- porcelain.ls_tree(self.repo, b"HEAD", outstream=f)
- self.assertEqual(f.getvalue(), "")
-
- def test_simple(self):
- # Commit a dummy file then modify it
- fullpath = os.path.join(self.repo.path, "foo")
- with open(fullpath, "w") as f:
- f.write("origstuff")
-
- porcelain.add(repo=self.repo.path, paths=[fullpath])
- porcelain.commit(
- repo=self.repo.path,
- message=b"test status",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- f = StringIO()
- porcelain.ls_tree(self.repo, b"HEAD", outstream=f)
- self.assertEqual(
- f.getvalue(),
- "100644 blob 8b82634d7eae019850bb883f06abf428c58bc9aa\tfoo\n",
- )
-
- def test_recursive(self):
- # Create a directory then write a dummy file in it
- dirpath = os.path.join(self.repo.path, "adir")
- filepath = os.path.join(dirpath, "afile")
- os.mkdir(dirpath)
- with open(filepath, "w") as f:
- f.write("origstuff")
- porcelain.add(repo=self.repo.path, paths=[filepath])
- porcelain.commit(
- repo=self.repo.path,
- message=b"test status",
- author=b"author <email>",
- committer=b"committer <email>",
- )
- f = StringIO()
- porcelain.ls_tree(self.repo, b"HEAD", outstream=f)
- self.assertEqual(
- f.getvalue(),
- "40000 tree b145cc69a5e17693e24d8a7be0016ed8075de66d\tadir\n",
- )
- f = StringIO()
- porcelain.ls_tree(self.repo, b"HEAD", outstream=f, recursive=True)
- self.assertEqual(
- f.getvalue(),
- "40000 tree b145cc69a5e17693e24d8a7be0016ed8075de66d\tadir\n"
- "100644 blob 8b82634d7eae019850bb883f06abf428c58bc9aa\tadir"
- "/afile\n",
- )
-
-
-class LsRemoteTests(PorcelainTestCase):
- def test_empty(self):
- self.assertEqual({}, porcelain.ls_remote(self.repo.path))
-
- def test_some(self):
- cid = porcelain.commit(
- repo=self.repo.path,
- message=b"test status",
- author=b"author <email>",
- committer=b"committer <email>",
- )
-
- self.assertEqual(
- {b"refs/heads/master": cid, b"HEAD": cid},
- porcelain.ls_remote(self.repo.path),
- )
-
-
-class LsFilesTests(PorcelainTestCase):
- def test_empty(self):
- self.assertEqual([], list(porcelain.ls_files(self.repo)))
-
- def test_simple(self):
- # Commit a dummy file then modify it
- fullpath = os.path.join(self.repo.path, "foo")
- with open(fullpath, "w") as f:
- f.write("origstuff")
-
- porcelain.add(repo=self.repo.path, paths=[fullpath])
- self.assertEqual([b"foo"], list(porcelain.ls_files(self.repo)))
-
-
-class RemoteAddTests(PorcelainTestCase):
- def test_new(self):
- porcelain.remote_add(self.repo, "jelmer", "git://jelmer.uk/code/dulwich")
- c = self.repo.get_config()
- self.assertEqual(
- c.get((b"remote", b"jelmer"), b"url"),
- b"git://jelmer.uk/code/dulwich",
- )
-
- def test_exists(self):
- porcelain.remote_add(self.repo, "jelmer", "git://jelmer.uk/code/dulwich")
- self.assertRaises(
- porcelain.RemoteExists,
- porcelain.remote_add,
- self.repo,
- "jelmer",
- "git://jelmer.uk/code/dulwich",
- )
-
-
-class RemoteRemoveTests(PorcelainTestCase):
- def test_remove(self):
- porcelain.remote_add(self.repo, "jelmer", "git://jelmer.uk/code/dulwich")
- c = self.repo.get_config()
- self.assertEqual(
- c.get((b"remote", b"jelmer"), b"url"),
- b"git://jelmer.uk/code/dulwich",
- )
- porcelain.remote_remove(self.repo, "jelmer")
- self.assertRaises(KeyError, porcelain.remote_remove, self.repo, "jelmer")
- c = self.repo.get_config()
- self.assertRaises(KeyError, c.get, (b"remote", b"jelmer"), b"url")
-
-
-class CheckIgnoreTests(PorcelainTestCase):
- def test_check_ignored(self):
- with open(os.path.join(self.repo.path, ".gitignore"), "w") as f:
- f.write("foo")
- foo_path = os.path.join(self.repo.path, "foo")
- with open(foo_path, "w") as f:
- f.write("BAR")
- bar_path = os.path.join(self.repo.path, "bar")
- with open(bar_path, "w") as f:
- f.write("BAR")
- self.assertEqual(["foo"], list(porcelain.check_ignore(self.repo, [foo_path])))
- self.assertEqual([], list(porcelain.check_ignore(self.repo, [bar_path])))
-
- def test_check_added_abs(self):
- path = os.path.join(self.repo.path, "foo")
- with open(path, "w") as f:
- f.write("BAR")
- self.repo.stage(["foo"])
- with open(os.path.join(self.repo.path, ".gitignore"), "w") as f:
- f.write("foo\n")
- self.assertEqual([], list(porcelain.check_ignore(self.repo, [path])))
- self.assertEqual(
- ["foo"],
- list(porcelain.check_ignore(self.repo, [path], no_index=True)),
- )
-
- def test_check_added_rel(self):
- with open(os.path.join(self.repo.path, "foo"), "w") as f:
- f.write("BAR")
- self.repo.stage(["foo"])
- with open(os.path.join(self.repo.path, ".gitignore"), "w") as f:
- f.write("foo\n")
- cwd = os.getcwd()
- os.mkdir(os.path.join(self.repo.path, "bar"))
- os.chdir(os.path.join(self.repo.path, "bar"))
- try:
- self.assertEqual(list(porcelain.check_ignore(self.repo, ["../foo"])), [])
- self.assertEqual(
- ["../foo"],
- list(porcelain.check_ignore(self.repo, ["../foo"], no_index=True)),
- )
- finally:
- os.chdir(cwd)
-
-
-class UpdateHeadTests(PorcelainTestCase):
- def test_set_to_branch(self):
- [c1] = build_commit_graph(self.repo.object_store, [[1]])
- self.repo.refs[b"refs/heads/blah"] = c1.id
- porcelain.update_head(self.repo, "blah")
- self.assertEqual(c1.id, self.repo.head())
- self.assertEqual(b"ref: refs/heads/blah", self.repo.refs.read_ref(b"HEAD"))
-
- def test_set_to_branch_detached(self):
- [c1] = build_commit_graph(self.repo.object_store, [[1]])
- self.repo.refs[b"refs/heads/blah"] = c1.id
- porcelain.update_head(self.repo, "blah", detached=True)
- self.assertEqual(c1.id, self.repo.head())
- self.assertEqual(c1.id, self.repo.refs.read_ref(b"HEAD"))
-
- def test_set_to_commit_detached(self):
- [c1] = build_commit_graph(self.repo.object_store, [[1]])
- self.repo.refs[b"refs/heads/blah"] = c1.id
- porcelain.update_head(self.repo, c1.id, detached=True)
- self.assertEqual(c1.id, self.repo.head())
- self.assertEqual(c1.id, self.repo.refs.read_ref(b"HEAD"))
-
- def test_set_new_branch(self):
- [c1] = build_commit_graph(self.repo.object_store, [[1]])
- self.repo.refs[b"refs/heads/blah"] = c1.id
- porcelain.update_head(self.repo, "blah", new_branch="bar")
- self.assertEqual(c1.id, self.repo.head())
- self.assertEqual(b"ref: refs/heads/bar", self.repo.refs.read_ref(b"HEAD"))
-
-
-class MailmapTests(PorcelainTestCase):
- def test_no_mailmap(self):
- self.assertEqual(
- b"Jelmer Vernooij <jelmer@samba.org>",
- porcelain.check_mailmap(self.repo, b"Jelmer Vernooij <jelmer@samba.org>"),
- )
-
- def test_mailmap_lookup(self):
- with open(os.path.join(self.repo.path, ".mailmap"), "wb") as f:
- f.write(
- b"""\
-Jelmer Vernooij <jelmer@debian.org>
-"""
- )
- self.assertEqual(
- b"Jelmer Vernooij <jelmer@debian.org>",
- porcelain.check_mailmap(self.repo, b"Jelmer Vernooij <jelmer@samba.org>"),
- )
-
-
-class FsckTests(PorcelainTestCase):
- def test_none(self):
- self.assertEqual([], list(porcelain.fsck(self.repo)))
-
- def test_git_dir(self):
- obj = Tree()
- a = Blob()
- a.data = b"foo"
- obj.add(b".git", 0o100644, a.id)
- self.repo.object_store.add_objects([(a, None), (obj, None)])
- self.assertEqual(
- [(obj.id, "invalid name .git")],
- [(sha, str(e)) for (sha, e) in porcelain.fsck(self.repo)],
- )
-
-
-class DescribeTests(PorcelainTestCase):
- def test_no_commits(self):
- self.assertRaises(KeyError, porcelain.describe, self.repo.path)
-
- def test_single_commit(self):
- fullpath = os.path.join(self.repo.path, "foo")
- with open(fullpath, "w") as f:
- f.write("BAR")
- porcelain.add(repo=self.repo.path, paths=[fullpath])
- sha = porcelain.commit(
- self.repo.path,
- message=b"Some message",
- author=b"Joe <joe@example.com>",
- committer=b"Bob <bob@example.com>",
- )
- self.assertEqual(
- "g{}".format(sha[:7].decode("ascii")),
- porcelain.describe(self.repo.path),
- )
-
- def test_tag(self):
- fullpath = os.path.join(self.repo.path, "foo")
- with open(fullpath, "w") as f:
- f.write("BAR")
- porcelain.add(repo=self.repo.path, paths=[fullpath])
- porcelain.commit(
- self.repo.path,
- message=b"Some message",
- author=b"Joe <joe@example.com>",
- committer=b"Bob <bob@example.com>",
- )
- porcelain.tag_create(
- self.repo.path,
- b"tryme",
- b"foo <foo@bar.com>",
- b"bar",
- annotated=True,
- )
- self.assertEqual("tryme", porcelain.describe(self.repo.path))
-
- def test_tag_and_commit(self):
- fullpath = os.path.join(self.repo.path, "foo")
- with open(fullpath, "w") as f:
- f.write("BAR")
- porcelain.add(repo=self.repo.path, paths=[fullpath])
- porcelain.commit(
- self.repo.path,
- message=b"Some message",
- author=b"Joe <joe@example.com>",
- committer=b"Bob <bob@example.com>",
- )
- porcelain.tag_create(
- self.repo.path,
- b"tryme",
- b"foo <foo@bar.com>",
- b"bar",
- annotated=True,
- )
- with open(fullpath, "w") as f:
- f.write("BAR2")
- porcelain.add(repo=self.repo.path, paths=[fullpath])
- sha = porcelain.commit(
- self.repo.path,
- message=b"Some message",
- author=b"Joe <joe@example.com>",
- committer=b"Bob <bob@example.com>",
- )
- self.assertEqual(
- "tryme-1-g{}".format(sha[:7].decode("ascii")),
- porcelain.describe(self.repo.path),
- )
-
- def test_tag_and_commit_full(self):
- fullpath = os.path.join(self.repo.path, "foo")
- with open(fullpath, "w") as f:
- f.write("BAR")
- porcelain.add(repo=self.repo.path, paths=[fullpath])
- porcelain.commit(
- self.repo.path,
- message=b"Some message",
- author=b"Joe <joe@example.com>",
- committer=b"Bob <bob@example.com>",
- )
- porcelain.tag_create(
- self.repo.path,
- b"tryme",
- b"foo <foo@bar.com>",
- b"bar",
- annotated=True,
- )
- with open(fullpath, "w") as f:
- f.write("BAR2")
- porcelain.add(repo=self.repo.path, paths=[fullpath])
- sha = porcelain.commit(
- self.repo.path,
- message=b"Some message",
- author=b"Joe <joe@example.com>",
- committer=b"Bob <bob@example.com>",
- )
- self.assertEqual(
- "tryme-1-g{}".format(sha.decode("ascii")),
- porcelain.describe(self.repo.path, abbrev=40),
- )
-
-
-class PathToTreeTests(PorcelainTestCase):
- def setUp(self):
- super().setUp()
- self.fp = os.path.join(self.test_dir, "bar")
- with open(self.fp, "w") as f:
- f.write("something")
- oldcwd = os.getcwd()
- self.addCleanup(os.chdir, oldcwd)
- os.chdir(self.test_dir)
-
- def test_path_to_tree_path_base(self):
- self.assertEqual(b"bar", porcelain.path_to_tree_path(self.test_dir, self.fp))
- self.assertEqual(b"bar", porcelain.path_to_tree_path(".", "./bar"))
- self.assertEqual(b"bar", porcelain.path_to_tree_path(".", "bar"))
- cwd = os.getcwd()
- self.assertEqual(
- b"bar", porcelain.path_to_tree_path(".", os.path.join(cwd, "bar"))
- )
- self.assertEqual(b"bar", porcelain.path_to_tree_path(cwd, "bar"))
-
- def test_path_to_tree_path_syntax(self):
- self.assertEqual(b"bar", porcelain.path_to_tree_path(".", "./bar"))
-
- def test_path_to_tree_path_error(self):
- with self.assertRaises(ValueError):
- with tempfile.TemporaryDirectory() as od:
- porcelain.path_to_tree_path(od, self.fp)
-
- def test_path_to_tree_path_rel(self):
- cwd = os.getcwd()
- os.mkdir(os.path.join(self.repo.path, "foo"))
- os.mkdir(os.path.join(self.repo.path, "foo/bar"))
- try:
- os.chdir(os.path.join(self.repo.path, "foo/bar"))
- with open("baz", "w") as f:
- f.write("contents")
- self.assertEqual(b"bar/baz", porcelain.path_to_tree_path("..", "baz"))
- self.assertEqual(
- b"bar/baz",
- porcelain.path_to_tree_path(
- os.path.join(os.getcwd(), ".."),
- os.path.join(os.getcwd(), "baz"),
- ),
- )
- self.assertEqual(
- b"bar/baz",
- porcelain.path_to_tree_path("..", os.path.join(os.getcwd(), "baz")),
- )
- self.assertEqual(
- b"bar/baz",
- porcelain.path_to_tree_path(os.path.join(os.getcwd(), ".."), "baz"),
- )
- finally:
- os.chdir(cwd)
-
-
-class GetObjectByPathTests(PorcelainTestCase):
- def test_simple(self):
- fullpath = os.path.join(self.repo.path, "foo")
- with open(fullpath, "w") as f:
- f.write("BAR")
- porcelain.add(repo=self.repo.path, paths=[fullpath])
- porcelain.commit(
- self.repo.path,
- message=b"Some message",
- author=b"Joe <joe@example.com>",
- committer=b"Bob <bob@example.com>",
- )
- self.assertEqual(b"BAR", porcelain.get_object_by_path(self.repo, "foo").data)
- self.assertEqual(b"BAR", porcelain.get_object_by_path(self.repo, b"foo").data)
-
- def test_encoding(self):
- fullpath = os.path.join(self.repo.path, "foo")
- with open(fullpath, "w") as f:
- f.write("BAR")
- porcelain.add(repo=self.repo.path, paths=[fullpath])
- porcelain.commit(
- self.repo.path,
- message=b"Some message",
- author=b"Joe <joe@example.com>",
- committer=b"Bob <bob@example.com>",
- encoding=b"utf-8",
- )
- self.assertEqual(b"BAR", porcelain.get_object_by_path(self.repo, "foo").data)
- self.assertEqual(b"BAR", porcelain.get_object_by_path(self.repo, b"foo").data)
-
- def test_missing(self):
- self.assertRaises(KeyError, porcelain.get_object_by_path, self.repo, "foo")
-
-
-class WriteTreeTests(PorcelainTestCase):
- def test_simple(self):
- fullpath = os.path.join(self.repo.path, "foo")
- with open(fullpath, "w") as f:
- f.write("BAR")
- porcelain.add(repo=self.repo.path, paths=[fullpath])
- self.assertEqual(
- b"d2092c8a9f311f0311083bf8d177f2ca0ab5b241",
- porcelain.write_tree(self.repo),
- )
-
-
-class ActiveBranchTests(PorcelainTestCase):
- def test_simple(self):
- self.assertEqual(b"master", porcelain.active_branch(self.repo))
-
-
-class FindUniqueAbbrevTests(PorcelainTestCase):
- def test_simple(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
- self.assertEqual(
- c1.id.decode("ascii")[:7],
- porcelain.find_unique_abbrev(self.repo.object_store, c1.id),
- )
-
-
-class PackRefsTests(PorcelainTestCase):
- def test_all(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
- self.repo.refs[b"refs/heads/master"] = c2.id
- self.repo.refs[b"refs/tags/foo"] = c1.id
-
- porcelain.pack_refs(self.repo, all=True)
-
- self.assertEqual(
- self.repo.refs.get_packed_refs(),
- {
- b"refs/heads/master": c2.id,
- b"refs/tags/foo": c1.id,
- },
- )
-
- def test_not_all(self):
- c1, c2, c3 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
- )
- self.repo.refs[b"HEAD"] = c3.id
- self.repo.refs[b"refs/heads/master"] = c2.id
- self.repo.refs[b"refs/tags/foo"] = c1.id
-
- porcelain.pack_refs(self.repo)
-
- self.assertEqual(
- self.repo.refs.get_packed_refs(),
- {
- b"refs/tags/foo": c1.id,
- },
- )
-
-
-class ServerTests(PorcelainTestCase):
- @contextlib.contextmanager
- def _serving(self):
- with make_server("localhost", 0, self.app) as server:
- thread = threading.Thread(target=server.serve_forever, daemon=True)
- thread.start()
-
- try:
- yield f"http://localhost:{server.server_port}"
-
- finally:
- server.shutdown()
- thread.join(10)
-
- def setUp(self):
- super().setUp()
-
- self.served_repo_path = os.path.join(self.test_dir, "served_repo.git")
- self.served_repo = Repo.init_bare(self.served_repo_path, mkdir=True)
- self.addCleanup(self.served_repo.close)
-
- backend = DictBackend({"/": self.served_repo})
- self.app = make_wsgi_chain(backend)
-
- def test_pull(self):
- (c1,) = build_commit_graph(self.served_repo.object_store, [[1]])
- self.served_repo.refs[b"refs/heads/master"] = c1.id
-
- with self._serving() as url:
- porcelain.pull(self.repo, url, "master")
-
- def test_push(self):
- (c1,) = build_commit_graph(self.repo.object_store, [[1]])
- self.repo.refs[b"refs/heads/master"] = c1.id
-
- with self._serving() as url:
- porcelain.push(self.repo, url, "master")
-
-
-class ForEachTests(PorcelainTestCase):
- def setUp(self):
- super().setUp()
- c1, c2, c3, c4 = build_commit_graph(
- self.repo.object_store, [[1], [2, 1], [3, 1, 2], [4]]
- )
- porcelain.tag_create(
- self.repo.path,
- b"v0.1",
- objectish=c1.id,
- annotated=True,
- message=b"0.1",
- )
- porcelain.tag_create(
- self.repo.path,
- b"v1.0",
- objectish=c2.id,
- annotated=True,
- message=b"1.0",
- )
- porcelain.tag_create(self.repo.path, b"simple-tag", objectish=c3.id)
- porcelain.tag_create(
- self.repo.path,
- b"v1.1",
- objectish=c4.id,
- annotated=True,
- message=b"1.1",
- )
- porcelain.branch_create(
- self.repo.path, b"feat", objectish=c2.id.decode("ascii")
- )
- self.repo.refs[b"HEAD"] = c4.id
-
- def test_for_each_ref(self):
- refs = porcelain.for_each_ref(self.repo)
-
- self.assertEqual(
- [(object_type, tag) for _, object_type, tag in refs],
- [
- (b"commit", b"refs/heads/feat"),
- (b"commit", b"refs/heads/master"),
- (b"commit", b"refs/tags/simple-tag"),
- (b"tag", b"refs/tags/v0.1"),
- (b"tag", b"refs/tags/v1.0"),
- (b"tag", b"refs/tags/v1.1"),
- ],
- )
-
- def test_for_each_ref_pattern(self):
- versions = porcelain.for_each_ref(self.repo, pattern="refs/tags/v*")
- self.assertEqual(
- [(object_type, tag) for _, object_type, tag in versions],
- [
- (b"tag", b"refs/tags/v0.1"),
- (b"tag", b"refs/tags/v1.0"),
- (b"tag", b"refs/tags/v1.1"),
- ],
- )
-
- versions = porcelain.for_each_ref(self.repo, pattern="refs/tags/v1.?")
- self.assertEqual(
- [(object_type, tag) for _, object_type, tag in versions],
- [
- (b"tag", b"refs/tags/v1.0"),
- (b"tag", b"refs/tags/v1.1"),
- ],
- )
blob - 3d12d18352fea00c16b71b733b92feb93a301f81 (mode 644)
blob + /dev/null
--- dulwich/tests/test_protocol.py
+++ /dev/null
-# test_protocol.py -- Tests for the git protocol
-# Copyright (C) 2009 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for the smart protocol utility functions."""
-
-from io import BytesIO
-
-from dulwich.tests import TestCase
-
-from ..errors import HangupException
-from ..protocol import (
- MULTI_ACK,
- MULTI_ACK_DETAILED,
- SINGLE_ACK,
- BufferedPktLineWriter,
- GitProtocolError,
- PktLineParser,
- Protocol,
- ReceivableProtocol,
- ack_type,
- extract_capabilities,
- extract_want_line_capabilities,
-)
-
-
-class BaseProtocolTests:
- def test_write_pkt_line_none(self):
- self.proto.write_pkt_line(None)
- self.assertEqual(self.rout.getvalue(), b"0000")
-
- def test_write_pkt_line(self):
- self.proto.write_pkt_line(b"bla")
- self.assertEqual(self.rout.getvalue(), b"0007bla")
-
- def test_read_pkt_line(self):
- self.rin.write(b"0008cmd ")
- self.rin.seek(0)
- self.assertEqual(b"cmd ", self.proto.read_pkt_line())
-
- def test_eof(self):
- self.rin.write(b"0000")
- self.rin.seek(0)
- self.assertFalse(self.proto.eof())
- self.assertEqual(None, self.proto.read_pkt_line())
- self.assertTrue(self.proto.eof())
- self.assertRaises(HangupException, self.proto.read_pkt_line)
-
- def test_unread_pkt_line(self):
- self.rin.write(b"0007foo0000")
- self.rin.seek(0)
- self.assertEqual(b"foo", self.proto.read_pkt_line())
- self.proto.unread_pkt_line(b"bar")
- self.assertEqual(b"bar", self.proto.read_pkt_line())
- self.assertEqual(None, self.proto.read_pkt_line())
- self.proto.unread_pkt_line(b"baz1")
- self.assertRaises(ValueError, self.proto.unread_pkt_line, b"baz2")
-
- def test_read_pkt_seq(self):
- self.rin.write(b"0008cmd 0005l0000")
- self.rin.seek(0)
- self.assertEqual([b"cmd ", b"l"], list(self.proto.read_pkt_seq()))
-
- def test_read_pkt_line_none(self):
- self.rin.write(b"0000")
- self.rin.seek(0)
- self.assertEqual(None, self.proto.read_pkt_line())
-
- def test_read_pkt_line_wrong_size(self):
- self.rin.write(b"0100too short")
- self.rin.seek(0)
- self.assertRaises(GitProtocolError, self.proto.read_pkt_line)
-
- def test_write_sideband(self):
- self.proto.write_sideband(3, b"bloe")
- self.assertEqual(self.rout.getvalue(), b"0009\x03bloe")
-
- def test_send_cmd(self):
- self.proto.send_cmd(b"fetch", b"a", b"b")
- self.assertEqual(self.rout.getvalue(), b"000efetch a\x00b\x00")
-
- def test_read_cmd(self):
- self.rin.write(b"0012cmd arg1\x00arg2\x00")
- self.rin.seek(0)
- self.assertEqual((b"cmd", [b"arg1", b"arg2"]), self.proto.read_cmd())
-
- def test_read_cmd_noend0(self):
- self.rin.write(b"0011cmd arg1\x00arg2")
- self.rin.seek(0)
- self.assertRaises(AssertionError, self.proto.read_cmd)
-
-
-class ProtocolTests(BaseProtocolTests, TestCase):
- def setUp(self):
- TestCase.setUp(self)
- self.rout = BytesIO()
- self.rin = BytesIO()
- self.proto = Protocol(self.rin.read, self.rout.write)
-
-
-class ReceivableBytesIO(BytesIO):
- """BytesIO with socket-like recv semantics for testing."""
-
- def __init__(self) -> None:
- BytesIO.__init__(self)
- self.allow_read_past_eof = False
-
- def recv(self, size):
- # fail fast if no bytes are available; in a real socket, this would
- # block forever
- if self.tell() == len(self.getvalue()) and not self.allow_read_past_eof:
- raise GitProtocolError("Blocking read past end of socket")
- if size == 1:
- return self.read(1)
- # calls shouldn't return quite as much as asked for
- return self.read(size - 1)
-
-
-class ReceivableProtocolTests(BaseProtocolTests, TestCase):
- def setUp(self):
- TestCase.setUp(self)
- self.rout = BytesIO()
- self.rin = ReceivableBytesIO()
- self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
- self.proto._rbufsize = 8
-
- def test_eof(self):
- # Allow blocking reads past EOF just for this test. The only parts of
- # the protocol that might check for EOF do not depend on the recv()
- # semantics anyway.
- self.rin.allow_read_past_eof = True
- BaseProtocolTests.test_eof(self)
-
- def test_recv(self):
- all_data = b"1234567" * 10 # not a multiple of bufsize
- self.rin.write(all_data)
- self.rin.seek(0)
- data = b""
- # We ask for 8 bytes each time and actually read 7, so it should take
- # exactly 10 iterations.
- for _ in range(10):
- data += self.proto.recv(10)
- # any more reads would block
- self.assertRaises(GitProtocolError, self.proto.recv, 10)
- self.assertEqual(all_data, data)
-
- def test_recv_read(self):
- all_data = b"1234567" # recv exactly in one call
- self.rin.write(all_data)
- self.rin.seek(0)
- self.assertEqual(b"1234", self.proto.recv(4))
- self.assertEqual(b"567", self.proto.read(3))
- self.assertRaises(GitProtocolError, self.proto.recv, 10)
-
- def test_read_recv(self):
- all_data = b"12345678abcdefg"
- self.rin.write(all_data)
- self.rin.seek(0)
- self.assertEqual(b"1234", self.proto.read(4))
- self.assertEqual(b"5678abc", self.proto.recv(8))
- self.assertEqual(b"defg", self.proto.read(4))
- self.assertRaises(GitProtocolError, self.proto.recv, 10)
-
- def test_mixed(self):
- # arbitrary non-repeating string
- all_data = b",".join(str(i).encode("ascii") for i in range(100))
- self.rin.write(all_data)
- self.rin.seek(0)
- data = b""
-
- for i in range(1, 100):
- data += self.proto.recv(i)
- # if we get to the end, do a non-blocking read instead of blocking
- if len(data) + i > len(all_data):
- data += self.proto.recv(i)
- # ReceivableBytesIO leaves off the last byte unless we ask
- # nicely
- data += self.proto.recv(1)
- break
- else:
- data += self.proto.read(i)
- else:
- # didn't break, something must have gone wrong
- self.fail()
-
- self.assertEqual(all_data, data)
-
-
-class CapabilitiesTestCase(TestCase):
- def test_plain(self):
- self.assertEqual((b"bla", []), extract_capabilities(b"bla"))
-
- def test_caps(self):
- self.assertEqual((b"bla", [b"la"]), extract_capabilities(b"bla\0la"))
- self.assertEqual((b"bla", [b"la"]), extract_capabilities(b"bla\0la\n"))
- self.assertEqual((b"bla", [b"la", b"la"]), extract_capabilities(b"bla\0la la"))
-
- def test_plain_want_line(self):
- self.assertEqual((b"want bla", []), extract_want_line_capabilities(b"want bla"))
-
- def test_caps_want_line(self):
- self.assertEqual(
- (b"want bla", [b"la"]),
- extract_want_line_capabilities(b"want bla la"),
- )
- self.assertEqual(
- (b"want bla", [b"la"]),
- extract_want_line_capabilities(b"want bla la\n"),
- )
- self.assertEqual(
- (b"want bla", [b"la", b"la"]),
- extract_want_line_capabilities(b"want bla la la"),
- )
-
- def test_ack_type(self):
- self.assertEqual(SINGLE_ACK, ack_type([b"foo", b"bar"]))
- self.assertEqual(MULTI_ACK, ack_type([b"foo", b"bar", b"multi_ack"]))
- self.assertEqual(
- MULTI_ACK_DETAILED,
- ack_type([b"foo", b"bar", b"multi_ack_detailed"]),
- )
- # choose detailed when both present
- self.assertEqual(
- MULTI_ACK_DETAILED,
- ack_type([b"foo", b"bar", b"multi_ack", b"multi_ack_detailed"]),
- )
-
-
-class BufferedPktLineWriterTests(TestCase):
- def setUp(self):
- TestCase.setUp(self)
- self._output = BytesIO()
- self._writer = BufferedPktLineWriter(self._output.write, bufsize=16)
-
- def assertOutputEquals(self, expected):
- self.assertEqual(expected, self._output.getvalue())
-
- def _truncate(self):
- self._output.seek(0)
- self._output.truncate()
-
- def test_write(self):
- self._writer.write(b"foo")
- self.assertOutputEquals(b"")
- self._writer.flush()
- self.assertOutputEquals(b"0007foo")
-
- def test_write_none(self):
- self._writer.write(None)
- self.assertOutputEquals(b"")
- self._writer.flush()
- self.assertOutputEquals(b"0000")
-
- def test_flush_empty(self):
- self._writer.flush()
- self.assertOutputEquals(b"")
-
- def test_write_multiple(self):
- self._writer.write(b"foo")
- self._writer.write(b"bar")
- self.assertOutputEquals(b"")
- self._writer.flush()
- self.assertOutputEquals(b"0007foo0007bar")
-
- def test_write_across_boundary(self):
- self._writer.write(b"foo")
- self._writer.write(b"barbaz")
- self.assertOutputEquals(b"0007foo000abarba")
- self._truncate()
- self._writer.flush()
- self.assertOutputEquals(b"z")
-
- def test_write_to_boundary(self):
- self._writer.write(b"foo")
- self._writer.write(b"barba")
- self.assertOutputEquals(b"0007foo0009barba")
- self._truncate()
- self._writer.write(b"z")
- self._writer.flush()
- self.assertOutputEquals(b"0005z")
-
-
-class PktLineParserTests(TestCase):
- def test_none(self):
- pktlines = []
- parser = PktLineParser(pktlines.append)
- parser.parse(b"0000")
- self.assertEqual(pktlines, [None])
- self.assertEqual(b"", parser.get_tail())
-
- def test_small_fragments(self):
- pktlines = []
- parser = PktLineParser(pktlines.append)
- parser.parse(b"00")
- parser.parse(b"05")
- parser.parse(b"z0000")
- self.assertEqual(pktlines, [b"z", None])
- self.assertEqual(b"", parser.get_tail())
-
- def test_multiple_packets(self):
- pktlines = []
- parser = PktLineParser(pktlines.append)
- parser.parse(b"0005z0006aba")
- self.assertEqual(pktlines, [b"z", b"ab"])
- self.assertEqual(b"a", parser.get_tail())
blob - fd0e7d33fae413c1b963caf9783bccb3772ca007 (mode 644)
blob + /dev/null
--- dulwich/tests/test_reflog.py
+++ /dev/null
-# test_reflog.py -- tests for reflog.py
-# Copyright (C) 2015 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for dulwich.reflog."""
-
-from io import BytesIO
-
-from dulwich.tests import TestCase
-
-from ..objects import ZERO_SHA
-from ..reflog import (
- drop_reflog_entry,
- format_reflog_line,
- parse_reflog_line,
- read_reflog,
-)
-
-
-class ReflogLineTests(TestCase):
- def test_format(self):
- self.assertEqual(
- b"0000000000000000000000000000000000000000 "
- b"49030649db3dfec5a9bc03e5dde4255a14499f16 Jelmer Vernooij "
- b"<jelmer@jelmer.uk> 1446552482 +0000 "
- b"clone: from git://jelmer.uk/samba",
- format_reflog_line(
- b"0000000000000000000000000000000000000000",
- b"49030649db3dfec5a9bc03e5dde4255a14499f16",
- b"Jelmer Vernooij <jelmer@jelmer.uk>",
- 1446552482,
- 0,
- b"clone: from git://jelmer.uk/samba",
- ),
- )
-
- self.assertEqual(
- b"0000000000000000000000000000000000000000 "
- b"49030649db3dfec5a9bc03e5dde4255a14499f16 Jelmer Vernooij "
- b"<jelmer@jelmer.uk> 1446552482 +0000 "
- b"clone: from git://jelmer.uk/samba",
- format_reflog_line(
- None,
- b"49030649db3dfec5a9bc03e5dde4255a14499f16",
- b"Jelmer Vernooij <jelmer@jelmer.uk>",
- 1446552482,
- 0,
- b"clone: from git://jelmer.uk/samba",
- ),
- )
-
- def test_parse(self):
- reflog_line = (
- b"0000000000000000000000000000000000000000 "
- b"49030649db3dfec5a9bc03e5dde4255a14499f16 Jelmer Vernooij "
- b"<jelmer@jelmer.uk> 1446552482 +0000 "
- b"clone: from git://jelmer.uk/samba"
- )
- self.assertEqual(
- (
- b"0000000000000000000000000000000000000000",
- b"49030649db3dfec5a9bc03e5dde4255a14499f16",
- b"Jelmer Vernooij <jelmer@jelmer.uk>",
- 1446552482,
- 0,
- b"clone: from git://jelmer.uk/samba",
- ),
- parse_reflog_line(reflog_line),
- )
-
-
-_TEST_REFLOG = (
- b"0000000000000000000000000000000000000000 "
- b"49030649db3dfec5a9bc03e5dde4255a14499f16 Jelmer Vernooij "
- b"<jelmer@jelmer.uk> 1446552482 +0000 "
- b"clone: from git://jelmer.uk/samba\n"
- b"49030649db3dfec5a9bc03e5dde4255a14499f16 "
- b"42d06bd4b77fed026b154d16493e5deab78f02ec Jelmer Vernooij "
- b"<jelmer@jelmer.uk> 1446552483 +0000 "
- b"clone: from git://jelmer.uk/samba\n"
- b"42d06bd4b77fed026b154d16493e5deab78f02ec "
- b"df6800012397fb85c56e7418dd4eb9405dee075c Jelmer Vernooij "
- b"<jelmer@jelmer.uk> 1446552484 +0000 "
- b"clone: from git://jelmer.uk/samba\n"
-)
-
-
-class ReflogDropTests(TestCase):
- def setUp(self):
- TestCase.setUp(self)
- self.f = BytesIO(_TEST_REFLOG)
- self.original_log = list(read_reflog(self.f))
- self.f.seek(0)
-
- def _read_log(self):
- self.f.seek(0)
- return list(read_reflog(self.f))
-
- def test_invalid(self):
- self.assertRaises(ValueError, drop_reflog_entry, self.f, -1)
-
- def test_drop_entry(self):
- drop_reflog_entry(self.f, 0)
- log = self._read_log()
- self.assertEqual(len(log), 2)
- self.assertEqual(self.original_log[0:2], log)
-
- self.f.seek(0)
- drop_reflog_entry(self.f, 1)
- log = self._read_log()
- self.assertEqual(len(log), 1)
- self.assertEqual(self.original_log[1], log[0])
-
- def test_drop_entry_with_rewrite(self):
- drop_reflog_entry(self.f, 1, True)
- log = self._read_log()
- self.assertEqual(len(log), 2)
- self.assertEqual(self.original_log[0], log[0])
- self.assertEqual(self.original_log[0].new_sha, log[1].old_sha)
- self.assertEqual(self.original_log[2].new_sha, log[1].new_sha)
-
- self.f.seek(0)
- drop_reflog_entry(self.f, 1, True)
- log = self._read_log()
- self.assertEqual(len(log), 1)
- self.assertEqual(ZERO_SHA, log[0].old_sha)
- self.assertEqual(self.original_log[2].new_sha, log[0].new_sha)
blob - 21f6bb87ffa1e12e15d1bcc84aeeec288398a8ee (mode 644)
blob + /dev/null
--- dulwich/tests/test_refs.py
+++ /dev/null
-# test_refs.py -- tests for refs.py
-# Copyright (C) 2013 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for dulwich.refs."""
-
-import os
-import sys
-import tempfile
-from io import BytesIO
-from typing import ClassVar, Dict
-
-from dulwich import errors
-from dulwich.tests import SkipTest, TestCase
-
-from ..file import GitFile
-from ..objects import ZERO_SHA
-from ..refs import (
- DictRefsContainer,
- InfoRefsContainer,
- SymrefLoop,
- _split_ref_line,
- check_ref_format,
- parse_symref_value,
- read_packed_refs,
- read_packed_refs_with_peeled,
- strip_peeled_refs,
- write_packed_refs,
-)
-from ..repo import Repo
-from .utils import open_repo, tear_down_repo
-
-
-class CheckRefFormatTests(TestCase):
- """Tests for the check_ref_format function.
-
- These are the same tests as in the git test suite.
- """
-
- def test_valid(self):
- self.assertTrue(check_ref_format(b"heads/foo"))
- self.assertTrue(check_ref_format(b"foo/bar/baz"))
- self.assertTrue(check_ref_format(b"refs///heads/foo"))
- self.assertTrue(check_ref_format(b"foo./bar"))
- self.assertTrue(check_ref_format(b"heads/foo@bar"))
- self.assertTrue(check_ref_format(b"heads/fix.lock.error"))
-
- def test_invalid(self):
- self.assertFalse(check_ref_format(b"foo"))
- self.assertFalse(check_ref_format(b"heads/foo/"))
- self.assertFalse(check_ref_format(b"./foo"))
- self.assertFalse(check_ref_format(b".refs/foo"))
- self.assertFalse(check_ref_format(b"heads/foo..bar"))
- self.assertFalse(check_ref_format(b"heads/foo?bar"))
- self.assertFalse(check_ref_format(b"heads/foo.lock"))
- self.assertFalse(check_ref_format(b"heads/v@{ation"))
- self.assertFalse(check_ref_format(b"heads/foo\bar"))
-
-
-ONES = b"1" * 40
-TWOS = b"2" * 40
-THREES = b"3" * 40
-FOURS = b"4" * 40
-
-
-class PackedRefsFileTests(TestCase):
- def test_split_ref_line_errors(self):
- self.assertRaises(errors.PackedRefsException, _split_ref_line, b"singlefield")
- self.assertRaises(errors.PackedRefsException, _split_ref_line, b"badsha name")
- self.assertRaises(
- errors.PackedRefsException,
- _split_ref_line,
- ONES + b" bad/../refname",
- )
-
- def test_read_without_peeled(self):
- f = BytesIO(b"\n".join([b"# comment", ONES + b" ref/1", TWOS + b" ref/2"]))
- self.assertEqual(
- [(ONES, b"ref/1"), (TWOS, b"ref/2")], list(read_packed_refs(f))
- )
-
- def test_read_without_peeled_errors(self):
- f = BytesIO(b"\n".join([ONES + b" ref/1", b"^" + TWOS]))
- self.assertRaises(errors.PackedRefsException, list, read_packed_refs(f))
-
- def test_read_with_peeled(self):
- f = BytesIO(
- b"\n".join(
- [
- ONES + b" ref/1",
- TWOS + b" ref/2",
- b"^" + THREES,
- FOURS + b" ref/4",
- ]
- )
- )
- self.assertEqual(
- [
- (ONES, b"ref/1", None),
- (TWOS, b"ref/2", THREES),
- (FOURS, b"ref/4", None),
- ],
- list(read_packed_refs_with_peeled(f)),
- )
-
- def test_read_with_peeled_errors(self):
- f = BytesIO(b"\n".join([b"^" + TWOS, ONES + b" ref/1"]))
- self.assertRaises(errors.PackedRefsException, list, read_packed_refs(f))
-
- f = BytesIO(b"\n".join([ONES + b" ref/1", b"^" + TWOS, b"^" + THREES]))
- self.assertRaises(errors.PackedRefsException, list, read_packed_refs(f))
-
- def test_write_with_peeled(self):
- f = BytesIO()
- write_packed_refs(f, {b"ref/1": ONES, b"ref/2": TWOS}, {b"ref/1": THREES})
- self.assertEqual(
- b"\n".join(
- [
- b"# pack-refs with: peeled",
- ONES + b" ref/1",
- b"^" + THREES,
- TWOS + b" ref/2",
- ]
- )
- + b"\n",
- f.getvalue(),
- )
-
- def test_write_without_peeled(self):
- f = BytesIO()
- write_packed_refs(f, {b"ref/1": ONES, b"ref/2": TWOS})
- self.assertEqual(
- b"\n".join([ONES + b" ref/1", TWOS + b" ref/2"]) + b"\n",
- f.getvalue(),
- )
-
-
-# Dict of refs that we expect all RefsContainerTests subclasses to define.
-_TEST_REFS = {
- b"HEAD": b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- b"refs/heads/40-char-ref-aaaaaaaaaaaaaaaaaa": b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- b"refs/heads/master": b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- b"refs/heads/packed": b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- b"refs/tags/refs-0.1": b"df6800012397fb85c56e7418dd4eb9405dee075c",
- b"refs/tags/refs-0.2": b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8",
- b"refs/heads/loop": b"ref: refs/heads/loop",
-}
-
-
-class RefsContainerTests:
- def test_keys(self):
- actual_keys = set(self._refs.keys())
- self.assertEqual(set(self._refs.allkeys()), actual_keys)
- self.assertEqual(set(_TEST_REFS.keys()), actual_keys)
-
- actual_keys = self._refs.keys(b"refs/heads")
- actual_keys.discard(b"loop")
- self.assertEqual(
- [b"40-char-ref-aaaaaaaaaaaaaaaaaa", b"master", b"packed"],
- sorted(actual_keys),
- )
- self.assertEqual(
- [b"refs-0.1", b"refs-0.2"], sorted(self._refs.keys(b"refs/tags"))
- )
-
- def test_iter(self):
- actual_keys = set(self._refs.keys())
- self.assertEqual(set(self._refs), actual_keys)
- self.assertEqual(set(_TEST_REFS.keys()), actual_keys)
-
- def test_as_dict(self):
- # refs/heads/loop does not show up even if it exists
- expected_refs = dict(_TEST_REFS)
- del expected_refs[b"refs/heads/loop"]
- self.assertEqual(expected_refs, self._refs.as_dict())
-
- def test_get_symrefs(self):
- self._refs.set_symbolic_ref(b"refs/heads/src", b"refs/heads/dst")
- symrefs = self._refs.get_symrefs()
- if b"HEAD" in symrefs:
- symrefs.pop(b"HEAD")
- self.assertEqual(
- {
- b"refs/heads/src": b"refs/heads/dst",
- b"refs/heads/loop": b"refs/heads/loop",
- },
- symrefs,
- )
-
- def test_setitem(self):
- self._refs[b"refs/some/ref"] = b"42d06bd4b77fed026b154d16493e5deab78f02ec"
- self.assertEqual(
- b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- self._refs[b"refs/some/ref"],
- )
- self.assertRaises(
- errors.RefFormatError,
- self._refs.__setitem__,
- b"notrefs/foo",
- b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- )
-
- def test_set_if_equals(self):
- nines = b"9" * 40
- self.assertFalse(self._refs.set_if_equals(b"HEAD", b"c0ffee", nines))
- self.assertEqual(
- b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs[b"HEAD"]
- )
-
- self.assertTrue(
- self._refs.set_if_equals(
- b"HEAD", b"42d06bd4b77fed026b154d16493e5deab78f02ec", nines
- )
- )
- self.assertEqual(nines, self._refs[b"HEAD"])
-
- # Setting the ref again is a no-op, but will return True.
- self.assertTrue(self._refs.set_if_equals(b"HEAD", nines, nines))
- self.assertEqual(nines, self._refs[b"HEAD"])
-
- self.assertTrue(self._refs.set_if_equals(b"refs/heads/master", None, nines))
- self.assertEqual(nines, self._refs[b"refs/heads/master"])
-
- self.assertTrue(
- self._refs.set_if_equals(b"refs/heads/nonexistent", ZERO_SHA, nines)
- )
- self.assertEqual(nines, self._refs[b"refs/heads/nonexistent"])
-
- def test_add_if_new(self):
- nines = b"9" * 40
- self.assertFalse(self._refs.add_if_new(b"refs/heads/master", nines))
- self.assertEqual(
- b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- self._refs[b"refs/heads/master"],
- )
-
- self.assertTrue(self._refs.add_if_new(b"refs/some/ref", nines))
- self.assertEqual(nines, self._refs[b"refs/some/ref"])
-
- def test_set_symbolic_ref(self):
- self._refs.set_symbolic_ref(b"refs/heads/symbolic", b"refs/heads/master")
- self.assertEqual(
- b"ref: refs/heads/master",
- self._refs.read_loose_ref(b"refs/heads/symbolic"),
- )
- self.assertEqual(
- b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- self._refs[b"refs/heads/symbolic"],
- )
-
- def test_set_symbolic_ref_overwrite(self):
- nines = b"9" * 40
- self.assertNotIn(b"refs/heads/symbolic", self._refs)
- self._refs[b"refs/heads/symbolic"] = nines
- self.assertEqual(nines, self._refs.read_loose_ref(b"refs/heads/symbolic"))
- self._refs.set_symbolic_ref(b"refs/heads/symbolic", b"refs/heads/master")
- self.assertEqual(
- b"ref: refs/heads/master",
- self._refs.read_loose_ref(b"refs/heads/symbolic"),
- )
- self.assertEqual(
- b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- self._refs[b"refs/heads/symbolic"],
- )
-
- def test_check_refname(self):
- self._refs._check_refname(b"HEAD")
- self._refs._check_refname(b"refs/stash")
- self._refs._check_refname(b"refs/heads/foo")
-
- self.assertRaises(errors.RefFormatError, self._refs._check_refname, b"refs")
- self.assertRaises(
- errors.RefFormatError, self._refs._check_refname, b"notrefs/foo"
- )
-
- def test_contains(self):
- self.assertIn(b"refs/heads/master", self._refs)
- self.assertNotIn(b"refs/heads/bar", self._refs)
-
- def test_delitem(self):
- self.assertEqual(
- b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- self._refs[b"refs/heads/master"],
- )
- del self._refs[b"refs/heads/master"]
- self.assertRaises(KeyError, lambda: self._refs[b"refs/heads/master"])
-
- def test_remove_if_equals(self):
- self.assertFalse(self._refs.remove_if_equals(b"HEAD", b"c0ffee"))
- self.assertEqual(
- b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs[b"HEAD"]
- )
- self.assertTrue(
- self._refs.remove_if_equals(
- b"refs/tags/refs-0.2",
- b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8",
- )
- )
- self.assertTrue(self._refs.remove_if_equals(b"refs/tags/refs-0.2", ZERO_SHA))
- self.assertNotIn(b"refs/tags/refs-0.2", self._refs)
-
- def test_import_refs_name(self):
- self._refs[b"refs/remotes/origin/other"] = (
- b"48d01bd4b77fed026b154d16493e5deab78f02ec"
- )
- self._refs.import_refs(
- b"refs/remotes/origin",
- {b"master": b"42d06bd4b77fed026b154d16493e5deab78f02ec"},
- )
- self.assertEqual(
- b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- self._refs[b"refs/remotes/origin/master"],
- )
- self.assertEqual(
- b"48d01bd4b77fed026b154d16493e5deab78f02ec",
- self._refs[b"refs/remotes/origin/other"],
- )
-
- def test_import_refs_name_prune(self):
- self._refs[b"refs/remotes/origin/other"] = (
- b"48d01bd4b77fed026b154d16493e5deab78f02ec"
- )
- self._refs.import_refs(
- b"refs/remotes/origin",
- {b"master": b"42d06bd4b77fed026b154d16493e5deab78f02ec"},
- prune=True,
- )
- self.assertEqual(
- b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- self._refs[b"refs/remotes/origin/master"],
- )
- self.assertNotIn(b"refs/remotes/origin/other", self._refs)
-
-
-class DictRefsContainerTests(RefsContainerTests, TestCase):
- def setUp(self):
- TestCase.setUp(self)
- self._refs = DictRefsContainer(dict(_TEST_REFS))
-
- def test_invalid_refname(self):
- # FIXME: Move this test into RefsContainerTests, but requires
- # some way of injecting invalid refs.
- self._refs._refs[b"refs/stash"] = b"00" * 20
- expected_refs = dict(_TEST_REFS)
- del expected_refs[b"refs/heads/loop"]
- expected_refs[b"refs/stash"] = b"00" * 20
- self.assertEqual(expected_refs, self._refs.as_dict())
-
-
-class DiskRefsContainerTests(RefsContainerTests, TestCase):
- def setUp(self):
- TestCase.setUp(self)
- self._repo = open_repo("refs.git")
- self.addCleanup(tear_down_repo, self._repo)
- self._refs = self._repo.refs
-
- def test_get_packed_refs(self):
- self.assertEqual(
- {
- b"refs/heads/packed": b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- b"refs/tags/refs-0.1": b"df6800012397fb85c56e7418dd4eb9405dee075c",
- },
- self._refs.get_packed_refs(),
- )
-
- def test_get_peeled_not_packed(self):
- # not packed
- self.assertEqual(None, self._refs.get_peeled(b"refs/tags/refs-0.2"))
- self.assertEqual(
- b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8",
- self._refs[b"refs/tags/refs-0.2"],
- )
-
- # packed, known not peelable
- self.assertEqual(
- self._refs[b"refs/heads/packed"],
- self._refs.get_peeled(b"refs/heads/packed"),
- )
-
- # packed, peeled
- self.assertEqual(
- b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- self._refs.get_peeled(b"refs/tags/refs-0.1"),
- )
-
- def test_setitem(self):
- RefsContainerTests.test_setitem(self)
- path = os.path.join(self._refs.path, b"refs", b"some", b"ref")
- with open(path, "rb") as f:
- self.assertEqual(b"42d06bd4b77fed026b154d16493e5deab78f02ec", f.read()[:40])
-
- self.assertRaises(
- OSError,
- self._refs.__setitem__,
- b"refs/some/ref/sub",
- b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- )
-
- def test_delete_refs_container(self):
- # We shouldn't delete the refs directory
- self._refs[b"refs/heads/blah"] = b"42d06bd4b77fed026b154d16493e5deab78f02ec"
- for ref in self._refs.allkeys():
- del self._refs[ref]
- self.assertTrue(os.path.exists(os.path.join(self._refs.path, b"refs")))
-
- def test_setitem_packed(self):
- with open(os.path.join(self._refs.path, b"packed-refs"), "w") as f:
- f.write("# pack-refs with: peeled fully-peeled sorted \n")
- f.write("42d06bd4b77fed026b154d16493e5deab78f02ec refs/heads/packed\n")
-
- # It's allowed to set a new ref on a packed ref, the new ref will be
- # placed outside on refs/
- self._refs[b"refs/heads/packed"] = b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8"
- packed_ref_path = os.path.join(self._refs.path, b"refs", b"heads", b"packed")
- with open(packed_ref_path, "rb") as f:
- self.assertEqual(b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8", f.read()[:40])
-
- self.assertRaises(
- OSError,
- self._refs.__setitem__,
- b"refs/heads/packed/sub",
- b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- )
-
- # this shouldn't overwrite the packed refs
- self.assertEqual(
- {b"refs/heads/packed": b"42d06bd4b77fed026b154d16493e5deab78f02ec"},
- self._refs.get_packed_refs(),
- )
-
- def test_add_packed_refs(self):
- # first, create a non-packed ref
- self._refs[b"refs/heads/packed"] = b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8"
-
- packed_ref_path = os.path.join(self._refs.path, b"refs", b"heads", b"packed")
- self.assertTrue(os.path.exists(packed_ref_path))
-
- # now overwrite that with a packed ref
- packed_refs_file_path = os.path.join(self._refs.path, b"packed-refs")
- self._refs.add_packed_refs(
- {
- b"refs/heads/packed": b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- }
- )
-
- # that should kill the file
- self.assertFalse(os.path.exists(packed_ref_path))
-
- # now delete the packed ref
- self._refs.add_packed_refs(
- {
- b"refs/heads/packed": None,
- }
- )
-
- # and it's gone!
- self.assertFalse(os.path.exists(packed_ref_path))
-
- self.assertRaises(
- KeyError,
- self._refs.__getitem__,
- b"refs/heads/packed",
- )
-
- # just in case, make sure we can't pack HEAD
- self.assertRaises(
- ValueError,
- self._refs.add_packed_refs,
- {b"HEAD": "02ac81614bcdbd585a37b4b0edf8cb8a"},
- )
-
- # delete all packed refs
- self._refs.add_packed_refs({ref: None for ref in self._refs.get_packed_refs()})
-
- self.assertEqual({}, self._refs.get_packed_refs())
-
- # remove the packed ref file, and check that adding nothing doesn't affect that
- os.remove(packed_refs_file_path)
-
- # adding nothing doesn't make it reappear
- self._refs.add_packed_refs({})
-
- self.assertFalse(os.path.exists(packed_refs_file_path))
-
- def test_setitem_symbolic(self):
- ones = b"1" * 40
- self._refs[b"HEAD"] = ones
- self.assertEqual(ones, self._refs[b"HEAD"])
-
- # ensure HEAD was not modified
- f = open(os.path.join(self._refs.path, b"HEAD"), "rb")
- v = next(iter(f)).rstrip(b"\n\r")
- f.close()
- self.assertEqual(b"ref: refs/heads/master", v)
-
- # ensure the symbolic link was written through
- f = open(os.path.join(self._refs.path, b"refs", b"heads", b"master"), "rb")
- self.assertEqual(ones, f.read()[:40])
- f.close()
-
- def test_set_if_equals(self):
- RefsContainerTests.test_set_if_equals(self)
-
- # ensure symref was followed
- self.assertEqual(b"9" * 40, self._refs[b"refs/heads/master"])
-
- # ensure lockfile was deleted
- self.assertFalse(
- os.path.exists(
- os.path.join(self._refs.path, b"refs", b"heads", b"master.lock")
- )
- )
- self.assertFalse(os.path.exists(os.path.join(self._refs.path, b"HEAD.lock")))
-
- def test_add_if_new_packed(self):
- # don't overwrite packed ref
- self.assertFalse(self._refs.add_if_new(b"refs/tags/refs-0.1", b"9" * 40))
- self.assertEqual(
- b"df6800012397fb85c56e7418dd4eb9405dee075c",
- self._refs[b"refs/tags/refs-0.1"],
- )
-
- def test_add_if_new_symbolic(self):
- # Use an empty repo instead of the default.
- repo_dir = os.path.join(tempfile.mkdtemp(), "test")
- os.makedirs(repo_dir)
- repo = Repo.init(repo_dir)
- self.addCleanup(tear_down_repo, repo)
- refs = repo.refs
-
- nines = b"9" * 40
- self.assertEqual(b"ref: refs/heads/master", refs.read_ref(b"HEAD"))
- self.assertNotIn(b"refs/heads/master", refs)
- self.assertTrue(refs.add_if_new(b"HEAD", nines))
- self.assertEqual(b"ref: refs/heads/master", refs.read_ref(b"HEAD"))
- self.assertEqual(nines, refs[b"HEAD"])
- self.assertEqual(nines, refs[b"refs/heads/master"])
- self.assertFalse(refs.add_if_new(b"HEAD", b"1" * 40))
- self.assertEqual(nines, refs[b"HEAD"])
- self.assertEqual(nines, refs[b"refs/heads/master"])
-
- def test_follow(self):
- self.assertEqual(
- (
- [b"HEAD", b"refs/heads/master"],
- b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- ),
- self._refs.follow(b"HEAD"),
- )
- self.assertEqual(
- (
- [b"refs/heads/master"],
- b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- ),
- self._refs.follow(b"refs/heads/master"),
- )
- self.assertRaises(SymrefLoop, self._refs.follow, b"refs/heads/loop")
-
- def test_set_overwrite_loop(self):
- self.assertRaises(SymrefLoop, self._refs.follow, b"refs/heads/loop")
- self._refs[b"refs/heads/loop"] = b"42d06bd4b77fed026b154d16493e5deab78f02ec"
- self.assertEqual(
- ([b"refs/heads/loop"], b"42d06bd4b77fed026b154d16493e5deab78f02ec"),
- self._refs.follow(b"refs/heads/loop"),
- )
-
- def test_delitem(self):
- RefsContainerTests.test_delitem(self)
- ref_file = os.path.join(self._refs.path, b"refs", b"heads", b"master")
- self.assertFalse(os.path.exists(ref_file))
- self.assertNotIn(b"refs/heads/master", self._refs.get_packed_refs())
-
- def test_delitem_symbolic(self):
- self.assertEqual(b"ref: refs/heads/master", self._refs.read_loose_ref(b"HEAD"))
- del self._refs[b"HEAD"]
- self.assertRaises(KeyError, lambda: self._refs[b"HEAD"])
- self.assertEqual(
- b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- self._refs[b"refs/heads/master"],
- )
- self.assertFalse(os.path.exists(os.path.join(self._refs.path, b"HEAD")))
-
- def test_remove_if_equals_symref(self):
- # HEAD is a symref, so shouldn't equal its dereferenced value
- self.assertFalse(
- self._refs.remove_if_equals(
- b"HEAD", b"42d06bd4b77fed026b154d16493e5deab78f02ec"
- )
- )
- self.assertTrue(
- self._refs.remove_if_equals(
- b"refs/heads/master",
- b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- )
- )
- self.assertRaises(KeyError, lambda: self._refs[b"refs/heads/master"])
-
- # HEAD is now a broken symref
- self.assertRaises(KeyError, lambda: self._refs[b"HEAD"])
- self.assertEqual(b"ref: refs/heads/master", self._refs.read_loose_ref(b"HEAD"))
-
- self.assertFalse(
- os.path.exists(
- os.path.join(self._refs.path, b"refs", b"heads", b"master.lock")
- )
- )
- self.assertFalse(os.path.exists(os.path.join(self._refs.path, b"HEAD.lock")))
-
- def test_remove_packed_without_peeled(self):
- refs_file = os.path.join(self._repo.path, "packed-refs")
- f = GitFile(refs_file)
- refs_data = f.read()
- f.close()
- f = GitFile(refs_file, "wb")
- f.write(
- b"\n".join(
- line
- for line in refs_data.split(b"\n")
- if not line or line[0] not in b"#^"
- )
- )
- f.close()
- self._repo = Repo(self._repo.path)
- refs = self._repo.refs
- self.assertTrue(
- refs.remove_if_equals(
- b"refs/heads/packed",
- b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- )
- )
-
- def test_remove_if_equals_packed(self):
- # test removing ref that is only packed
- self.assertEqual(
- b"df6800012397fb85c56e7418dd4eb9405dee075c",
- self._refs[b"refs/tags/refs-0.1"],
- )
- self.assertTrue(
- self._refs.remove_if_equals(
- b"refs/tags/refs-0.1",
- b"df6800012397fb85c56e7418dd4eb9405dee075c",
- )
- )
- self.assertRaises(KeyError, lambda: self._refs[b"refs/tags/refs-0.1"])
-
- def test_remove_parent(self):
- self._refs[b"refs/heads/foo/bar"] = b"df6800012397fb85c56e7418dd4eb9405dee075c"
- del self._refs[b"refs/heads/foo/bar"]
- ref_file = os.path.join(
- self._refs.path,
- b"refs",
- b"heads",
- b"foo",
- b"bar",
- )
- self.assertFalse(os.path.exists(ref_file))
- ref_file = os.path.join(self._refs.path, b"refs", b"heads", b"foo")
- self.assertFalse(os.path.exists(ref_file))
- ref_file = os.path.join(self._refs.path, b"refs", b"heads")
- self.assertTrue(os.path.exists(ref_file))
- self._refs[b"refs/heads/foo"] = b"df6800012397fb85c56e7418dd4eb9405dee075c"
-
- def test_read_ref(self):
- self.assertEqual(b"ref: refs/heads/master", self._refs.read_ref(b"HEAD"))
- self.assertEqual(
- b"42d06bd4b77fed026b154d16493e5deab78f02ec",
- self._refs.read_ref(b"refs/heads/packed"),
- )
- self.assertEqual(None, self._refs.read_ref(b"nonexistent"))
-
- def test_read_loose_ref(self):
- self._refs[b"refs/heads/foo"] = b"df6800012397fb85c56e7418dd4eb9405dee075c"
-
- self.assertEqual(None, self._refs.read_ref(b"refs/heads/foo/bar"))
-
- def test_non_ascii(self):
- try:
- encoded_ref = os.fsencode("refs/tags/schön")
- except UnicodeEncodeError as exc:
- raise SkipTest(
- "filesystem encoding doesn't support special character"
- ) from exc
- p = os.path.join(os.fsencode(self._repo.path), encoded_ref)
- with open(p, "w") as f:
- f.write("00" * 20)
-
- expected_refs = dict(_TEST_REFS)
- expected_refs[encoded_ref] = b"00" * 20
- del expected_refs[b"refs/heads/loop"]
-
- self.assertEqual(expected_refs, self._repo.get_refs())
-
- def test_cyrillic(self):
- if sys.platform in ("darwin", "win32"):
- raise SkipTest("filesystem encoding doesn't support arbitrary bytes")
- # reported in https://github.com/dulwich/dulwich/issues/608
- name = b"\xcd\xee\xe2\xe0\xff\xe2\xe5\xf2\xea\xe01"
- encoded_ref = b"refs/heads/" + name
- with open(os.path.join(os.fsencode(self._repo.path), encoded_ref), "w") as f:
- f.write("00" * 20)
-
- expected_refs = set(_TEST_REFS.keys())
- expected_refs.add(encoded_ref)
-
- self.assertEqual(expected_refs, set(self._repo.refs.allkeys()))
- self.assertEqual(
- {r[len(b"refs/") :] for r in expected_refs if r.startswith(b"refs/")},
- set(self._repo.refs.subkeys(b"refs/")),
- )
- expected_refs.remove(b"refs/heads/loop")
- expected_refs.add(b"HEAD")
- self.assertEqual(expected_refs, set(self._repo.get_refs().keys()))
-
-
-_TEST_REFS_SERIALIZED = (
- b"42d06bd4b77fed026b154d16493e5deab78f02ec\t"
- b"refs/heads/40-char-ref-aaaaaaaaaaaaaaaaaa\n"
- b"42d06bd4b77fed026b154d16493e5deab78f02ec\trefs/heads/master\n"
- b"42d06bd4b77fed026b154d16493e5deab78f02ec\trefs/heads/packed\n"
- b"df6800012397fb85c56e7418dd4eb9405dee075c\trefs/tags/refs-0.1\n"
- b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8\trefs/tags/refs-0.2\n"
-)
-
-
-class InfoRefsContainerTests(TestCase):
- def test_invalid_refname(self):
- text = _TEST_REFS_SERIALIZED + b"00" * 20 + b"\trefs/stash\n"
- refs = InfoRefsContainer(BytesIO(text))
- expected_refs = dict(_TEST_REFS)
- del expected_refs[b"HEAD"]
- expected_refs[b"refs/stash"] = b"00" * 20
- del expected_refs[b"refs/heads/loop"]
- self.assertEqual(expected_refs, refs.as_dict())
-
- def test_keys(self):
- refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED))
- actual_keys = set(refs.keys())
- self.assertEqual(set(refs.allkeys()), actual_keys)
- expected_refs = dict(_TEST_REFS)
- del expected_refs[b"HEAD"]
- del expected_refs[b"refs/heads/loop"]
- self.assertEqual(set(expected_refs.keys()), actual_keys)
-
- actual_keys = refs.keys(b"refs/heads")
- actual_keys.discard(b"loop")
- self.assertEqual(
- [b"40-char-ref-aaaaaaaaaaaaaaaaaa", b"master", b"packed"],
- sorted(actual_keys),
- )
- self.assertEqual([b"refs-0.1", b"refs-0.2"], sorted(refs.keys(b"refs/tags")))
-
- def test_as_dict(self):
- refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED))
- # refs/heads/loop does not show up even if it exists
- expected_refs = dict(_TEST_REFS)
- del expected_refs[b"HEAD"]
- del expected_refs[b"refs/heads/loop"]
- self.assertEqual(expected_refs, refs.as_dict())
-
- def test_contains(self):
- refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED))
- self.assertIn(b"refs/heads/master", refs)
- self.assertNotIn(b"refs/heads/bar", refs)
-
- def test_get_peeled(self):
- refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED))
- # refs/heads/loop does not show up even if it exists
- self.assertEqual(
- _TEST_REFS[b"refs/heads/master"],
- refs.get_peeled(b"refs/heads/master"),
- )
-
-
-class ParseSymrefValueTests(TestCase):
- def test_valid(self):
- self.assertEqual(b"refs/heads/foo", parse_symref_value(b"ref: refs/heads/foo"))
-
- def test_invalid(self):
- self.assertRaises(ValueError, parse_symref_value, b"foobar")
-
-
-class StripPeeledRefsTests(TestCase):
- all_refs: ClassVar[Dict[bytes, bytes]] = {
- b"refs/heads/master": b"8843d7f92416211de9ebb963ff4ce28125932878",
- b"refs/heads/testing": b"186a005b134d8639a58b6731c7c1ea821a6eedba",
- b"refs/tags/1.0.0": b"a93db4b0360cc635a2b93675010bac8d101f73f0",
- b"refs/tags/1.0.0^{}": b"a93db4b0360cc635a2b93675010bac8d101f73f0",
- b"refs/tags/2.0.0": b"0749936d0956c661ac8f8d3483774509c165f89e",
- b"refs/tags/2.0.0^{}": b"0749936d0956c661ac8f8d3483774509c165f89e",
- }
- non_peeled_refs: ClassVar[Dict[bytes, bytes]] = {
- b"refs/heads/master": b"8843d7f92416211de9ebb963ff4ce28125932878",
- b"refs/heads/testing": b"186a005b134d8639a58b6731c7c1ea821a6eedba",
- b"refs/tags/1.0.0": b"a93db4b0360cc635a2b93675010bac8d101f73f0",
- b"refs/tags/2.0.0": b"0749936d0956c661ac8f8d3483774509c165f89e",
- }
-
- def test_strip_peeled_refs(self):
- # Simple check of two dicts
- self.assertEqual(strip_peeled_refs(self.all_refs), self.non_peeled_refs)
blob - a2d11802a3a9e906e969f88be4d4abcdd4d703c0 (mode 644)
blob + /dev/null
--- dulwich/tests/test_repository.py
+++ /dev/null
-# test_repository.py -- tests for repository.py
-# Copyright (C) 2007 James Westby <jw+debian@jameswestby.net>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for the repository."""
-
-import glob
-import locale
-import os
-import shutil
-import stat
-import sys
-import tempfile
-import warnings
-
-from dulwich import errors, objects, porcelain
-from dulwich.tests import TestCase, skipIf
-
-from ..config import Config
-from ..errors import NotGitRepository
-from ..object_store import tree_lookup_path
-from ..repo import (
- InvalidUserIdentity,
- MemoryRepo,
- Repo,
- UnsupportedExtension,
- UnsupportedVersion,
- check_user_identity,
-)
-from .utils import open_repo, setup_warning_catcher, tear_down_repo
-
-missing_sha = b"b91fa4d900e17e99b433218e988c4eb4a3e9a097"
-
-
-class CreateRepositoryTests(TestCase):
- def assertFileContentsEqual(self, expected, repo, path):
- f = repo.get_named_file(path)
- if not f:
- self.assertEqual(expected, None)
- else:
- with f:
- self.assertEqual(expected, f.read())
-
- def _check_repo_contents(self, repo, expect_bare):
- self.assertEqual(expect_bare, repo.bare)
- self.assertFileContentsEqual(b"Unnamed repository", repo, "description")
- self.assertFileContentsEqual(b"", repo, os.path.join("info", "exclude"))
- self.assertFileContentsEqual(None, repo, "nonexistent file")
- barestr = b"bare = " + str(expect_bare).lower().encode("ascii")
- with repo.get_named_file("config") as f:
- config_text = f.read()
- self.assertIn(barestr, config_text, "%r" % config_text)
- expect_filemode = sys.platform != "win32"
- barestr = b"filemode = " + str(expect_filemode).lower().encode("ascii")
- with repo.get_named_file("config") as f:
- config_text = f.read()
- self.assertIn(barestr, config_text, "%r" % config_text)
-
- if isinstance(repo, Repo):
- expected_mode = "0o100644" if expect_filemode else "0o100666"
- expected = {
- "HEAD": expected_mode,
- "config": expected_mode,
- "description": expected_mode,
- }
- actual = {
- f[len(repo._controldir) + 1 :]: oct(os.stat(f).st_mode)
- for f in glob.glob(os.path.join(repo._controldir, "*"))
- if os.path.isfile(f)
- }
-
- self.assertEqual(expected, actual)
-
- def test_create_memory(self):
- repo = MemoryRepo.init_bare([], {})
- self._check_repo_contents(repo, True)
-
- def test_create_disk_bare(self):
- tmp_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- repo = Repo.init_bare(tmp_dir)
- self.assertEqual(tmp_dir, repo._controldir)
- self._check_repo_contents(repo, True)
-
- def test_create_disk_non_bare(self):
- tmp_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- repo = Repo.init(tmp_dir)
- self.assertEqual(os.path.join(tmp_dir, ".git"), repo._controldir)
- self._check_repo_contents(repo, False)
-
- def test_create_disk_non_bare_mkdir(self):
- tmp_dir = tempfile.mkdtemp()
- target_dir = os.path.join(tmp_dir, "target")
- self.addCleanup(shutil.rmtree, tmp_dir)
- repo = Repo.init(target_dir, mkdir=True)
- self.assertEqual(os.path.join(target_dir, ".git"), repo._controldir)
- self._check_repo_contents(repo, False)
-
- def test_create_disk_bare_mkdir(self):
- tmp_dir = tempfile.mkdtemp()
- target_dir = os.path.join(tmp_dir, "target")
- self.addCleanup(shutil.rmtree, tmp_dir)
- repo = Repo.init_bare(target_dir, mkdir=True)
- self.assertEqual(target_dir, repo._controldir)
- self._check_repo_contents(repo, True)
-
-
-class MemoryRepoTests(TestCase):
- def test_set_description(self):
- r = MemoryRepo.init_bare([], {})
- description = b"Some description"
- r.set_description(description)
- self.assertEqual(description, r.get_description())
-
- def test_pull_into(self):
- r = MemoryRepo.init_bare([], {})
- repo = open_repo("a.git")
- self.addCleanup(tear_down_repo, repo)
- repo.fetch(r)
-
-
-class RepositoryRootTests(TestCase):
- def mkdtemp(self):
- return tempfile.mkdtemp()
-
- def open_repo(self, name):
- temp_dir = self.mkdtemp()
- repo = open_repo(name, temp_dir)
- self.addCleanup(tear_down_repo, repo)
- return repo
-
- def test_simple_props(self):
- r = self.open_repo("a.git")
- self.assertEqual(r.controldir(), r.path)
-
- def test_setitem(self):
- r = self.open_repo("a.git")
- r[b"refs/tags/foo"] = b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"
- self.assertEqual(
- b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", r[b"refs/tags/foo"].id
- )
-
- def test_getitem_unicode(self):
- r = self.open_repo("a.git")
-
- test_keys = [
- (b"refs/heads/master", True),
- (b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", True),
- (b"11" * 19 + b"--", False),
- ]
-
- for k, contained in test_keys:
- self.assertEqual(k in r, contained)
-
- # Avoid deprecation warning under Py3.2+
- if getattr(self, "assertRaisesRegex", None):
- assertRaisesRegexp = self.assertRaisesRegex
- else:
- assertRaisesRegexp = self.assertRaisesRegexp
- for k, _ in test_keys:
- assertRaisesRegexp(
- TypeError,
- "'name' must be bytestring, not int",
- r.__getitem__,
- 12,
- )
-
- def test_delitem(self):
- r = self.open_repo("a.git")
-
- del r[b"refs/heads/master"]
- self.assertRaises(KeyError, lambda: r[b"refs/heads/master"])
-
- del r[b"HEAD"]
- self.assertRaises(KeyError, lambda: r[b"HEAD"])
-
- self.assertRaises(ValueError, r.__delitem__, b"notrefs/foo")
-
- def test_get_refs(self):
- r = self.open_repo("a.git")
- self.assertEqual(
- {
- b"HEAD": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
- b"refs/heads/master": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
- b"refs/tags/mytag": b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a",
- b"refs/tags/mytag-packed": b"b0931cadc54336e78a1d980420e3268903b57a50",
- },
- r.get_refs(),
- )
-
- def test_head(self):
- r = self.open_repo("a.git")
- self.assertEqual(r.head(), b"a90fa2d900a17e99b433217e988c4eb4a2e9a097")
-
- def test_get_object(self):
- r = self.open_repo("a.git")
- obj = r.get_object(r.head())
- self.assertEqual(obj.type_name, b"commit")
-
- def test_get_object_non_existant(self):
- r = self.open_repo("a.git")
- self.assertRaises(KeyError, r.get_object, missing_sha)
-
- def test_contains_object(self):
- r = self.open_repo("a.git")
- self.assertIn(r.head(), r)
- self.assertNotIn(b"z" * 40, r)
-
- def test_contains_ref(self):
- r = self.open_repo("a.git")
- self.assertIn(b"HEAD", r)
-
- def test_get_no_description(self):
- r = self.open_repo("a.git")
- self.assertIs(None, r.get_description())
-
- def test_get_description(self):
- r = self.open_repo("a.git")
- with open(os.path.join(r.path, "description"), "wb") as f:
- f.write(b"Some description")
- self.assertEqual(b"Some description", r.get_description())
-
- def test_set_description(self):
- r = self.open_repo("a.git")
- description = b"Some description"
- r.set_description(description)
- self.assertEqual(description, r.get_description())
-
- def test_contains_missing(self):
- r = self.open_repo("a.git")
- self.assertNotIn(b"bar", r)
-
- def test_get_peeled(self):
- # unpacked ref
- r = self.open_repo("a.git")
- tag_sha = b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a"
- self.assertNotEqual(r[tag_sha].sha().hexdigest(), r.head())
- self.assertEqual(r.get_peeled(b"refs/tags/mytag"), r.head())
-
- # packed ref with cached peeled value
- packed_tag_sha = b"b0931cadc54336e78a1d980420e3268903b57a50"
- parent_sha = r[r.head()].parents[0]
- self.assertNotEqual(r[packed_tag_sha].sha().hexdigest(), parent_sha)
- self.assertEqual(r.get_peeled(b"refs/tags/mytag-packed"), parent_sha)
-
- # TODO: add more corner cases to test repo
-
- def test_get_peeled_not_tag(self):
- r = self.open_repo("a.git")
- self.assertEqual(r.get_peeled(b"HEAD"), r.head())
-
- def test_get_parents(self):
- r = self.open_repo("a.git")
- self.assertEqual(
- [b"2a72d929692c41d8554c07f6301757ba18a65d91"],
- r.get_parents(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"),
- )
- r.update_shallow([b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"], None)
- self.assertEqual([], r.get_parents(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"))
-
- def test_get_walker(self):
- r = self.open_repo("a.git")
- # include defaults to [r.head()]
- self.assertEqual(
- [e.commit.id for e in r.get_walker()],
- [r.head(), b"2a72d929692c41d8554c07f6301757ba18a65d91"],
- )
- self.assertEqual(
- [
- e.commit.id
- for e in r.get_walker([b"2a72d929692c41d8554c07f6301757ba18a65d91"])
- ],
- [b"2a72d929692c41d8554c07f6301757ba18a65d91"],
- )
- self.assertEqual(
- [
- e.commit.id
- for e in r.get_walker(b"2a72d929692c41d8554c07f6301757ba18a65d91")
- ],
- [b"2a72d929692c41d8554c07f6301757ba18a65d91"],
- )
-
- def assertFilesystemHidden(self, path):
- if sys.platform != "win32":
- return
- import ctypes
- from ctypes.wintypes import DWORD, LPCWSTR
-
- GetFileAttributesW = ctypes.WINFUNCTYPE(DWORD, LPCWSTR)(
- ("GetFileAttributesW", ctypes.windll.kernel32)
- )
- self.assertTrue(2 & GetFileAttributesW(path))
-
- def test_init_existing(self):
- tmp_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- t = Repo.init(tmp_dir)
- self.addCleanup(t.close)
- self.assertEqual(os.listdir(tmp_dir), [".git"])
- self.assertFilesystemHidden(os.path.join(tmp_dir, ".git"))
-
- def test_init_mkdir(self):
- tmp_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- repo_dir = os.path.join(tmp_dir, "a-repo")
-
- t = Repo.init(repo_dir, mkdir=True)
- self.addCleanup(t.close)
- self.assertEqual(os.listdir(repo_dir), [".git"])
- self.assertFilesystemHidden(os.path.join(repo_dir, ".git"))
-
- def test_init_mkdir_unicode(self):
- repo_name = "\xa7"
- try:
- os.fsencode(repo_name)
- except UnicodeEncodeError:
- self.skipTest("filesystem lacks unicode support")
- tmp_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- repo_dir = os.path.join(tmp_dir, repo_name)
-
- t = Repo.init(repo_dir, mkdir=True)
- self.addCleanup(t.close)
- self.assertEqual(os.listdir(repo_dir), [".git"])
- self.assertFilesystemHidden(os.path.join(repo_dir, ".git"))
-
- @skipIf(sys.platform == "win32", "fails on Windows")
- def test_fetch(self):
- r = self.open_repo("a.git")
- tmp_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- t = Repo.init(tmp_dir)
- self.addCleanup(t.close)
- r.fetch(t)
- self.assertIn(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", t)
- self.assertIn(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", t)
- self.assertIn(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", t)
- self.assertIn(b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a", t)
- self.assertIn(b"b0931cadc54336e78a1d980420e3268903b57a50", t)
-
- @skipIf(sys.platform == "win32", "fails on Windows")
- def test_fetch_ignores_missing_refs(self):
- r = self.open_repo("a.git")
- missing = b"1234566789123456789123567891234657373833"
- r.refs[b"refs/heads/blah"] = missing
- tmp_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- t = Repo.init(tmp_dir)
- self.addCleanup(t.close)
- r.fetch(t)
- self.assertIn(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", t)
- self.assertIn(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", t)
- self.assertIn(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", t)
- self.assertIn(b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a", t)
- self.assertIn(b"b0931cadc54336e78a1d980420e3268903b57a50", t)
- self.assertNotIn(missing, t)
-
- def test_clone(self):
- r = self.open_repo("a.git")
- tmp_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- with r.clone(tmp_dir, mkdir=False) as t:
- self.assertEqual(
- {
- b"HEAD": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
- b"refs/remotes/origin/master": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
- b"refs/remotes/origin/HEAD": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
- b"refs/heads/master": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
- b"refs/tags/mytag": b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a",
- b"refs/tags/mytag-packed": b"b0931cadc54336e78a1d980420e3268903b57a50",
- },
- t.refs.as_dict(),
- )
- shas = [e.commit.id for e in r.get_walker()]
- self.assertEqual(
- shas, [t.head(), b"2a72d929692c41d8554c07f6301757ba18a65d91"]
- )
- c = t.get_config()
- encoded_path = r.path
- if not isinstance(encoded_path, bytes):
- encoded_path = os.fsencode(encoded_path)
- self.assertEqual(encoded_path, c.get((b"remote", b"origin"), b"url"))
- self.assertEqual(
- b"+refs/heads/*:refs/remotes/origin/*",
- c.get((b"remote", b"origin"), b"fetch"),
- )
-
- def test_clone_no_head(self):
- temp_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, temp_dir)
- repo_dir = os.path.join(
- os.path.dirname(__file__), "..", "..", "testdata", "repos"
- )
- dest_dir = os.path.join(temp_dir, "a.git")
- shutil.copytree(os.path.join(repo_dir, "a.git"), dest_dir, symlinks=True)
- r = Repo(dest_dir)
- self.addCleanup(r.close)
- del r.refs[b"refs/heads/master"]
- del r.refs[b"HEAD"]
- t = r.clone(os.path.join(temp_dir, "b.git"), mkdir=True)
- self.addCleanup(t.close)
- self.assertEqual(
- {
- b"refs/tags/mytag": b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a",
- b"refs/tags/mytag-packed": b"b0931cadc54336e78a1d980420e3268903b57a50",
- },
- t.refs.as_dict(),
- )
-
- def test_clone_empty(self):
- """Test clone() doesn't crash if HEAD points to a non-existing ref.
-
- This simulates cloning server-side bare repository either when it is
- still empty or if user renames master branch and pushes private repo
- to the server.
- Non-bare repo HEAD always points to an existing ref.
- """
- r = self.open_repo("empty.git")
- tmp_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- r.clone(tmp_dir, mkdir=False, bare=True)
-
- def test_reset_index_symlink_enabled(self):
- if sys.platform == "win32":
- self.skipTest("symlinks are not supported on Windows")
- tmp_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
-
- o = Repo.init(os.path.join(tmp_dir, "s"), mkdir=True)
- os.symlink("foo", os.path.join(tmp_dir, "s", "bar"))
- o.stage("bar")
- o.do_commit(b"add symlink")
-
- t = o.clone(os.path.join(tmp_dir, "t"), symlinks=True)
- o.close()
- bar_path = os.path.join(tmp_dir, "t", "bar")
- if sys.platform == "win32":
- with open(bar_path) as f:
- self.assertEqual("foo", f.read())
- else:
- self.assertEqual("foo", os.readlink(bar_path))
- t.close()
-
- def test_reset_index_symlink_disabled(self):
- tmp_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
-
- o = Repo.init(os.path.join(tmp_dir, "s"), mkdir=True)
- o.close()
- os.symlink("foo", os.path.join(tmp_dir, "s", "bar"))
- o.stage("bar")
- o.do_commit(b"add symlink")
-
- t = o.clone(os.path.join(tmp_dir, "t"), symlinks=False)
- with open(os.path.join(tmp_dir, "t", "bar")) as f:
- self.assertEqual("foo", f.read())
-
- t.close()
-
- def test_clone_bare(self):
- r = self.open_repo("a.git")
- tmp_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- t = r.clone(tmp_dir, mkdir=False)
- t.close()
-
- def test_clone_checkout_and_bare(self):
- r = self.open_repo("a.git")
- tmp_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- self.assertRaises(
- ValueError, r.clone, tmp_dir, mkdir=False, checkout=True, bare=True
- )
-
- def test_clone_branch(self):
- r = self.open_repo("a.git")
- r.refs[b"refs/heads/mybranch"] = b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a"
- tmp_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- with r.clone(tmp_dir, mkdir=False, branch=b"mybranch") as t:
- # HEAD should point to specified branch and not origin HEAD
- chain, sha = t.refs.follow(b"HEAD")
- self.assertEqual(chain[-1], b"refs/heads/mybranch")
- self.assertEqual(sha, b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a")
- self.assertEqual(
- t.refs[b"refs/remotes/origin/HEAD"],
- b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
- )
-
- def test_clone_tag(self):
- r = self.open_repo("a.git")
- tmp_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- with r.clone(tmp_dir, mkdir=False, branch=b"mytag") as t:
- # HEAD should be detached (and not a symbolic ref) at tag
- self.assertEqual(
- t.refs.read_ref(b"HEAD"),
- b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a",
- )
- self.assertEqual(
- t.refs[b"refs/remotes/origin/HEAD"],
- b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
- )
-
- def test_clone_invalid_branch(self):
- r = self.open_repo("a.git")
- tmp_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- self.assertRaises(
- ValueError,
- r.clone,
- tmp_dir,
- mkdir=False,
- branch=b"mybranch",
- )
-
- def test_merge_history(self):
- r = self.open_repo("simple_merge.git")
- shas = [e.commit.id for e in r.get_walker()]
- self.assertEqual(
- shas,
- [
- b"5dac377bdded4c9aeb8dff595f0faeebcc8498cc",
- b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd",
- b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6",
- b"60dacdc733de308bb77bb76ce0fb0f9b44c9769e",
- b"0d89f20333fbb1d2f3a94da77f4981373d8f4310",
- ],
- )
-
- def test_out_of_order_merge(self):
- """Test that revision history is ordered by date, not parent order."""
- r = self.open_repo("ooo_merge.git")
- shas = [e.commit.id for e in r.get_walker()]
- self.assertEqual(
- shas,
- [
- b"7601d7f6231db6a57f7bbb79ee52e4d462fd44d1",
- b"f507291b64138b875c28e03469025b1ea20bc614",
- b"fb5b0425c7ce46959bec94d54b9a157645e114f5",
- b"f9e39b120c68182a4ba35349f832d0e4e61f485c",
- ],
- )
-
- def test_get_tags_empty(self):
- r = self.open_repo("ooo_merge.git")
- self.assertEqual({}, r.refs.as_dict(b"refs/tags"))
-
- def test_get_config(self):
- r = self.open_repo("ooo_merge.git")
- self.assertIsInstance(r.get_config(), Config)
-
- def test_get_config_stack(self):
- r = self.open_repo("ooo_merge.git")
- self.assertIsInstance(r.get_config_stack(), Config)
-
- def test_common_revisions(self):
- """This test demonstrates that ``find_common_revisions()`` actually
- returns common heads, not revisions; dulwich already uses
- ``find_common_revisions()`` in such a manner (see
- ``Repo.find_objects()``).
- """
- expected_shas = {b"60dacdc733de308bb77bb76ce0fb0f9b44c9769e"}
-
- # Source for objects.
- r_base = self.open_repo("simple_merge.git")
-
- # Re-create each-side of the merge in simple_merge.git.
- #
- # Since the trees and blobs are missing, the repository created is
- # corrupted, but we're only checking for commits for the purpose of
- # this test, so it's immaterial.
- r1_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, r1_dir)
- r1_commits = [
- b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd", # HEAD
- b"60dacdc733de308bb77bb76ce0fb0f9b44c9769e",
- b"0d89f20333fbb1d2f3a94da77f4981373d8f4310",
- ]
-
- r2_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, r2_dir)
- r2_commits = [
- b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6", # HEAD
- b"60dacdc733de308bb77bb76ce0fb0f9b44c9769e",
- b"0d89f20333fbb1d2f3a94da77f4981373d8f4310",
- ]
-
- r1 = Repo.init_bare(r1_dir)
- for c in r1_commits:
- r1.object_store.add_object(r_base.get_object(c))
- r1.refs[b"HEAD"] = r1_commits[0]
-
- r2 = Repo.init_bare(r2_dir)
- for c in r2_commits:
- r2.object_store.add_object(r_base.get_object(c))
- r2.refs[b"HEAD"] = r2_commits[0]
-
- # Finally, the 'real' testing!
- shas = r2.object_store.find_common_revisions(r1.get_graph_walker())
- self.assertEqual(set(shas), expected_shas)
-
- shas = r1.object_store.find_common_revisions(r2.get_graph_walker())
- self.assertEqual(set(shas), expected_shas)
-
- def test_shell_hook_pre_commit(self):
- if os.name != "posix":
- self.skipTest("shell hook tests requires POSIX shell")
-
- pre_commit_fail = """#!/bin/sh
-exit 1
-"""
-
- pre_commit_success = """#!/bin/sh
-exit 0
-"""
-
- repo_dir = os.path.join(self.mkdtemp())
- self.addCleanup(shutil.rmtree, repo_dir)
- r = Repo.init(repo_dir)
- self.addCleanup(r.close)
-
- pre_commit = os.path.join(r.controldir(), "hooks", "pre-commit")
-
- with open(pre_commit, "w") as f:
- f.write(pre_commit_fail)
- os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
-
- self.assertRaises(
- errors.CommitError,
- r.do_commit,
- b"failed commit",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12345,
- commit_timezone=0,
- author_timestamp=12345,
- author_timezone=0,
- )
-
- with open(pre_commit, "w") as f:
- f.write(pre_commit_success)
- os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
-
- commit_sha = r.do_commit(
- b"empty commit",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12395,
- commit_timezone=0,
- author_timestamp=12395,
- author_timezone=0,
- )
- self.assertEqual([], r[commit_sha].parents)
-
- def test_shell_hook_commit_msg(self):
- if os.name != "posix":
- self.skipTest("shell hook tests requires POSIX shell")
-
- commit_msg_fail = """#!/bin/sh
-exit 1
-"""
-
- commit_msg_success = """#!/bin/sh
-exit 0
-"""
-
- repo_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, repo_dir)
- r = Repo.init(repo_dir)
- self.addCleanup(r.close)
-
- commit_msg = os.path.join(r.controldir(), "hooks", "commit-msg")
-
- with open(commit_msg, "w") as f:
- f.write(commit_msg_fail)
- os.chmod(commit_msg, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
-
- self.assertRaises(
- errors.CommitError,
- r.do_commit,
- b"failed commit",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12345,
- commit_timezone=0,
- author_timestamp=12345,
- author_timezone=0,
- )
-
- with open(commit_msg, "w") as f:
- f.write(commit_msg_success)
- os.chmod(commit_msg, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
-
- commit_sha = r.do_commit(
- b"empty commit",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12395,
- commit_timezone=0,
- author_timestamp=12395,
- author_timezone=0,
- )
- self.assertEqual([], r[commit_sha].parents)
-
- def test_shell_hook_pre_commit_add_files(self):
- if os.name != "posix":
- self.skipTest("shell hook tests requires POSIX shell")
-
- pre_commit_contents = """#!{executable}
-import sys
-sys.path.extend({path!r})
-from dulwich.repo import Repo
-
-with open('foo', 'w') as f:
- f.write('newfile')
-
-r = Repo('.')
-r.stage(['foo'])
-""".format(
- executable=sys.executable,
- path=[os.path.join(os.path.dirname(__file__), "..", ".."), *sys.path],
- )
-
- repo_dir = os.path.join(self.mkdtemp())
- self.addCleanup(shutil.rmtree, repo_dir)
- r = Repo.init(repo_dir)
- self.addCleanup(r.close)
-
- with open(os.path.join(repo_dir, "blah"), "w") as f:
- f.write("blah")
-
- r.stage(["blah"])
-
- pre_commit = os.path.join(r.controldir(), "hooks", "pre-commit")
-
- with open(pre_commit, "w") as f:
- f.write(pre_commit_contents)
- os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
-
- commit_sha = r.do_commit(
- b"new commit",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12395,
- commit_timezone=0,
- author_timestamp=12395,
- author_timezone=0,
- )
- self.assertEqual([], r[commit_sha].parents)
-
- tree = r[r[commit_sha].tree]
- self.assertEqual({b"blah", b"foo"}, set(tree))
-
- def test_shell_hook_post_commit(self):
- if os.name != "posix":
- self.skipTest("shell hook tests requires POSIX shell")
-
- repo_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, repo_dir)
-
- r = Repo.init(repo_dir)
- self.addCleanup(r.close)
-
- (fd, path) = tempfile.mkstemp(dir=repo_dir)
- os.close(fd)
- post_commit_msg = (
- """#!/bin/sh
-rm """
- + path
- + """
-"""
- )
-
- root_sha = r.do_commit(
- b"empty commit",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12345,
- commit_timezone=0,
- author_timestamp=12345,
- author_timezone=0,
- )
- self.assertEqual([], r[root_sha].parents)
-
- post_commit = os.path.join(r.controldir(), "hooks", "post-commit")
-
- with open(post_commit, "wb") as f:
- f.write(post_commit_msg.encode(locale.getpreferredencoding()))
- os.chmod(post_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
-
- commit_sha = r.do_commit(
- b"empty commit",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12345,
- commit_timezone=0,
- author_timestamp=12345,
- author_timezone=0,
- )
- self.assertEqual([root_sha], r[commit_sha].parents)
-
- self.assertFalse(os.path.exists(path))
-
- post_commit_msg_fail = """#!/bin/sh
-exit 1
-"""
- with open(post_commit, "w") as f:
- f.write(post_commit_msg_fail)
- os.chmod(post_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
-
- warnings.simplefilter("always", UserWarning)
- self.addCleanup(warnings.resetwarnings)
- warnings_list, restore_warnings = setup_warning_catcher()
- self.addCleanup(restore_warnings)
-
- commit_sha2 = r.do_commit(
- b"empty commit",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12345,
- commit_timezone=0,
- author_timestamp=12345,
- author_timezone=0,
- )
- expected_warning = UserWarning(
- "post-commit hook failed: Hook post-commit exited with "
- "non-zero status 1",
- )
- for w in warnings_list:
- if type(w) is type(expected_warning) and w.args == expected_warning.args:
- break
- else:
- raise AssertionError(
- f"Expected warning {expected_warning!r} not in {warnings_list!r}"
- )
- self.assertEqual([commit_sha], r[commit_sha2].parents)
-
- def test_as_dict(self):
- def check(repo):
- self.assertEqual(
- repo.refs.subkeys(b"refs/tags"),
- repo.refs.subkeys(b"refs/tags/"),
- )
- self.assertEqual(
- repo.refs.as_dict(b"refs/tags"),
- repo.refs.as_dict(b"refs/tags/"),
- )
- self.assertEqual(
- repo.refs.as_dict(b"refs/heads"),
- repo.refs.as_dict(b"refs/heads/"),
- )
-
- bare = self.open_repo("a.git")
- tmp_dir = self.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- with bare.clone(tmp_dir, mkdir=False) as nonbare:
- check(nonbare)
- check(bare)
-
- def test_working_tree(self):
- temp_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, temp_dir)
- worktree_temp_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, worktree_temp_dir)
- r = Repo.init(temp_dir)
- self.addCleanup(r.close)
- root_sha = r.do_commit(
- b"empty commit",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12345,
- commit_timezone=0,
- author_timestamp=12345,
- author_timezone=0,
- )
- r.refs[b"refs/heads/master"] = root_sha
- w = Repo._init_new_working_directory(worktree_temp_dir, r)
- self.addCleanup(w.close)
- new_sha = w.do_commit(
- b"new commit",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12345,
- commit_timezone=0,
- author_timestamp=12345,
- author_timezone=0,
- )
- w.refs[b"HEAD"] = new_sha
- self.assertEqual(
- os.path.abspath(r.controldir()), os.path.abspath(w.commondir())
- )
- self.assertEqual(r.refs.keys(), w.refs.keys())
- self.assertNotEqual(r.head(), w.head())
-
-
-class BuildRepoRootTests(TestCase):
- """Tests that build on-disk repos from scratch.
-
- Repos live in a temp dir and are torn down after each test. They start with
- a single commit in master having single file named 'a'.
- """
-
- def get_repo_dir(self):
- return os.path.join(tempfile.mkdtemp(), "test")
-
- def setUp(self):
- super().setUp()
- self._repo_dir = self.get_repo_dir()
- os.makedirs(self._repo_dir)
- r = self._repo = Repo.init(self._repo_dir)
- self.addCleanup(tear_down_repo, r)
- self.assertFalse(r.bare)
- self.assertEqual(b"ref: refs/heads/master", r.refs.read_ref(b"HEAD"))
- self.assertRaises(KeyError, lambda: r.refs[b"refs/heads/master"])
-
- with open(os.path.join(r.path, "a"), "wb") as f:
- f.write(b"file contents")
- r.stage(["a"])
- commit_sha = r.do_commit(
- b"msg",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12345,
- commit_timezone=0,
- author_timestamp=12345,
- author_timezone=0,
- )
- self.assertEqual([], r[commit_sha].parents)
- self._root_commit = commit_sha
-
- def test_get_shallow(self):
- self.assertEqual(set(), self._repo.get_shallow())
- with open(os.path.join(self._repo.path, ".git", "shallow"), "wb") as f:
- f.write(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097\n")
- self.assertEqual(
- {b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"},
- self._repo.get_shallow(),
- )
-
- def test_update_shallow(self):
- self._repo.update_shallow(None, None) # no op
- self.assertEqual(set(), self._repo.get_shallow())
- self._repo.update_shallow([b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"], None)
- self.assertEqual(
- {b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"},
- self._repo.get_shallow(),
- )
- self._repo.update_shallow(
- [b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"],
- [b"f9e39b120c68182a4ba35349f832d0e4e61f485c"],
- )
- self.assertEqual(
- {b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"},
- self._repo.get_shallow(),
- )
- self._repo.update_shallow(None, [b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"])
- self.assertEqual(set(), self._repo.get_shallow())
- self.assertEqual(
- False,
- os.path.exists(os.path.join(self._repo.controldir(), "shallow")),
- )
-
- def test_build_repo(self):
- r = self._repo
- self.assertEqual(b"ref: refs/heads/master", r.refs.read_ref(b"HEAD"))
- self.assertEqual(self._root_commit, r.refs[b"refs/heads/master"])
- expected_blob = objects.Blob.from_string(b"file contents")
- self.assertEqual(expected_blob.data, r[expected_blob.id].data)
- actual_commit = r[self._root_commit]
- self.assertEqual(b"msg", actual_commit.message)
-
- def test_commit_modified(self):
- r = self._repo
- with open(os.path.join(r.path, "a"), "wb") as f:
- f.write(b"new contents")
- r.stage(["a"])
- commit_sha = r.do_commit(
- b"modified a",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12395,
- commit_timezone=0,
- author_timestamp=12395,
- author_timezone=0,
- )
- self.assertEqual([self._root_commit], r[commit_sha].parents)
- a_mode, a_id = tree_lookup_path(r.get_object, r[commit_sha].tree, b"a")
- self.assertEqual(stat.S_IFREG | 0o644, a_mode)
- self.assertEqual(b"new contents", r[a_id].data)
-
- @skipIf(not getattr(os, "symlink", None), "Requires symlink support")
- def test_commit_symlink(self):
- r = self._repo
- os.symlink("a", os.path.join(r.path, "b"))
- r.stage(["a", "b"])
- commit_sha = r.do_commit(
- b"Symlink b",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12395,
- commit_timezone=0,
- author_timestamp=12395,
- author_timezone=0,
- )
- self.assertEqual([self._root_commit], r[commit_sha].parents)
- b_mode, b_id = tree_lookup_path(r.get_object, r[commit_sha].tree, b"b")
- self.assertTrue(stat.S_ISLNK(b_mode))
- self.assertEqual(b"a", r[b_id].data)
-
- def test_commit_merge_heads_file(self):
- tmp_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, tmp_dir)
- r = Repo.init(tmp_dir)
- with open(os.path.join(r.path, "a"), "w") as f:
- f.write("initial text")
- c1 = r.do_commit(
- b"initial commit",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12395,
- commit_timezone=0,
- author_timestamp=12395,
- author_timezone=0,
- )
- with open(os.path.join(r.path, "a"), "w") as f:
- f.write("merged text")
- with open(os.path.join(r.path, ".git", "MERGE_HEAD"), "w") as f:
- f.write("c27a2d21dd136312d7fa9e8baabb82561a1727d0\n")
- r.stage(["a"])
- commit_sha = r.do_commit(
- b"deleted a",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12395,
- commit_timezone=0,
- author_timestamp=12395,
- author_timezone=0,
- )
- self.assertEqual(
- [c1, b"c27a2d21dd136312d7fa9e8baabb82561a1727d0"],
- r[commit_sha].parents,
- )
-
- def test_commit_deleted(self):
- r = self._repo
- os.remove(os.path.join(r.path, "a"))
- r.stage(["a"])
- commit_sha = r.do_commit(
- b"deleted a",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12395,
- commit_timezone=0,
- author_timestamp=12395,
- author_timezone=0,
- )
- self.assertEqual([self._root_commit], r[commit_sha].parents)
- self.assertEqual([], list(r.open_index()))
- tree = r[r[commit_sha].tree]
- self.assertEqual([], list(tree.iteritems()))
-
- def test_commit_follows(self):
- r = self._repo
- r.refs.set_symbolic_ref(b"HEAD", b"refs/heads/bla")
- commit_sha = r.do_commit(
- b"commit with strange character",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12395,
- commit_timezone=0,
- author_timestamp=12395,
- author_timezone=0,
- ref=b"HEAD",
- )
- self.assertEqual(commit_sha, r[b"refs/heads/bla"].id)
-
- def test_commit_encoding(self):
- r = self._repo
- commit_sha = r.do_commit(
- b"commit with strange character \xee",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12395,
- commit_timezone=0,
- author_timestamp=12395,
- author_timezone=0,
- encoding=b"iso8859-1",
- )
- self.assertEqual(b"iso8859-1", r[commit_sha].encoding)
-
- def test_compression_level(self):
- r = self._repo
- c = r.get_config()
- c.set(("core",), "compression", "3")
- c.set(("core",), "looseCompression", "4")
- c.write_to_path()
- r = Repo(self._repo_dir)
- self.assertEqual(r.object_store.loose_compression_level, 4)
-
- def test_repositoryformatversion_unsupported(self):
- r = self._repo
- c = r.get_config()
- c.set(("core",), "repositoryformatversion", "2")
- c.write_to_path()
- self.assertRaises(UnsupportedVersion, Repo, self._repo_dir)
-
- def test_repositoryformatversion_1(self):
- r = self._repo
- c = r.get_config()
- c.set(("core",), "repositoryformatversion", "1")
- c.write_to_path()
- Repo(self._repo_dir)
-
- def test_worktreeconfig_extension(self):
- r = self._repo
- c = r.get_config()
- c.set(("core",), "repositoryformatversion", "1")
- c.set(("extensions",), "worktreeconfig", True)
- c.write_to_path()
- c = r.get_worktree_config()
- c.set(("user",), "repositoryformatversion", "1")
- c.set((b"user",), b"name", b"Jelmer")
- c.write_to_path()
- cs = r.get_config_stack()
- self.assertEqual(cs.get(("user",), "name"), b"Jelmer")
-
- def test_repositoryformatversion_1_extension(self):
- r = self._repo
- c = r.get_config()
- c.set(("core",), "repositoryformatversion", "1")
- c.set(("extensions",), "unknownextension", True)
- c.write_to_path()
- self.assertRaises(UnsupportedExtension, Repo, self._repo_dir)
-
- def test_commit_encoding_from_config(self):
- r = self._repo
- c = r.get_config()
- c.set(("i18n",), "commitEncoding", "iso8859-1")
- c.write_to_path()
- commit_sha = r.do_commit(
- b"commit with strange character \xee",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12395,
- commit_timezone=0,
- author_timestamp=12395,
- author_timezone=0,
- )
- self.assertEqual(b"iso8859-1", r[commit_sha].encoding)
-
- def test_commit_config_identity(self):
- # commit falls back to the users' identity if it wasn't specified
- r = self._repo
- c = r.get_config()
- c.set((b"user",), b"name", b"Jelmer")
- c.set((b"user",), b"email", b"jelmer@apache.org")
- c.write_to_path()
- commit_sha = r.do_commit(b"message")
- self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].author)
- self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].committer)
-
- def test_commit_config_identity_strips_than(self):
- # commit falls back to the users' identity if it wasn't specified,
- # and strips superfluous <>
- r = self._repo
- c = r.get_config()
- c.set((b"user",), b"name", b"Jelmer")
- c.set((b"user",), b"email", b"<jelmer@apache.org>")
- c.write_to_path()
- commit_sha = r.do_commit(b"message")
- self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].author)
- self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].committer)
-
- def test_commit_config_identity_in_memoryrepo(self):
- # commit falls back to the users' identity if it wasn't specified
- r = MemoryRepo.init_bare([], {})
- c = r.get_config()
- c.set((b"user",), b"name", b"Jelmer")
- c.set((b"user",), b"email", b"jelmer@apache.org")
-
- commit_sha = r.do_commit(b"message", tree=objects.Tree().id)
- self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].author)
- self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].committer)
-
- def test_commit_config_identity_from_env(self):
- # commit falls back to the users' identity if it wasn't specified
- self.overrideEnv("GIT_COMMITTER_NAME", "joe")
- self.overrideEnv("GIT_COMMITTER_EMAIL", "joe@example.com")
- r = self._repo
- c = r.get_config()
- c.set((b"user",), b"name", b"Jelmer")
- c.set((b"user",), b"email", b"jelmer@apache.org")
- c.write_to_path()
- commit_sha = r.do_commit(b"message")
- self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].author)
- self.assertEqual(b"joe <joe@example.com>", r[commit_sha].committer)
-
- def test_commit_fail_ref(self):
- r = self._repo
-
- def set_if_equals(name, old_ref, new_ref, **kwargs):
- return False
-
- r.refs.set_if_equals = set_if_equals
-
- def add_if_new(name, new_ref, **kwargs):
- self.fail("Unexpected call to add_if_new")
-
- r.refs.add_if_new = add_if_new
-
- old_shas = set(r.object_store)
- self.assertRaises(
- errors.CommitError,
- r.do_commit,
- b"failed commit",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12345,
- commit_timezone=0,
- author_timestamp=12345,
- author_timezone=0,
- )
- new_shas = set(r.object_store) - old_shas
- self.assertEqual(1, len(new_shas))
- # Check that the new commit (now garbage) was added.
- new_commit = r[new_shas.pop()]
- self.assertEqual(r[self._root_commit].tree, new_commit.tree)
- self.assertEqual(b"failed commit", new_commit.message)
-
- def test_commit_branch(self):
- r = self._repo
-
- commit_sha = r.do_commit(
- b"commit to branch",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12395,
- commit_timezone=0,
- author_timestamp=12395,
- author_timezone=0,
- ref=b"refs/heads/new_branch",
- )
- self.assertEqual(self._root_commit, r[b"HEAD"].id)
- self.assertEqual(commit_sha, r[b"refs/heads/new_branch"].id)
- self.assertEqual([], r[commit_sha].parents)
- self.assertIn(b"refs/heads/new_branch", r)
-
- new_branch_head = commit_sha
-
- commit_sha = r.do_commit(
- b"commit to branch 2",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12395,
- commit_timezone=0,
- author_timestamp=12395,
- author_timezone=0,
- ref=b"refs/heads/new_branch",
- )
- self.assertEqual(self._root_commit, r[b"HEAD"].id)
- self.assertEqual(commit_sha, r[b"refs/heads/new_branch"].id)
- self.assertEqual([new_branch_head], r[commit_sha].parents)
-
- def test_commit_merge_heads(self):
- r = self._repo
- merge_1 = r.do_commit(
- b"commit to branch 2",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12395,
- commit_timezone=0,
- author_timestamp=12395,
- author_timezone=0,
- ref=b"refs/heads/new_branch",
- )
- commit_sha = r.do_commit(
- b"commit with merge",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12395,
- commit_timezone=0,
- author_timestamp=12395,
- author_timezone=0,
- merge_heads=[merge_1],
- )
- self.assertEqual([self._root_commit, merge_1], r[commit_sha].parents)
-
- def test_commit_dangling_commit(self):
- r = self._repo
-
- old_shas = set(r.object_store)
- old_refs = r.get_refs()
- commit_sha = r.do_commit(
- b"commit with no ref",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12395,
- commit_timezone=0,
- author_timestamp=12395,
- author_timezone=0,
- ref=None,
- )
- new_shas = set(r.object_store) - old_shas
-
- # New sha is added, but no new refs
- self.assertEqual(1, len(new_shas))
- new_commit = r[new_shas.pop()]
- self.assertEqual(r[self._root_commit].tree, new_commit.tree)
- self.assertEqual([], r[commit_sha].parents)
- self.assertEqual(old_refs, r.get_refs())
-
- def test_commit_dangling_commit_with_parents(self):
- r = self._repo
-
- old_shas = set(r.object_store)
- old_refs = r.get_refs()
- commit_sha = r.do_commit(
- b"commit with no ref",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12395,
- commit_timezone=0,
- author_timestamp=12395,
- author_timezone=0,
- ref=None,
- merge_heads=[self._root_commit],
- )
- new_shas = set(r.object_store) - old_shas
-
- # New sha is added, but no new refs
- self.assertEqual(1, len(new_shas))
- new_commit = r[new_shas.pop()]
- self.assertEqual(r[self._root_commit].tree, new_commit.tree)
- self.assertEqual([self._root_commit], r[commit_sha].parents)
- self.assertEqual(old_refs, r.get_refs())
-
- def test_stage_absolute(self):
- r = self._repo
- os.remove(os.path.join(r.path, "a"))
- self.assertRaises(ValueError, r.stage, [os.path.join(r.path, "a")])
-
- def test_stage_deleted(self):
- r = self._repo
- os.remove(os.path.join(r.path, "a"))
- r.stage(["a"])
- r.stage(["a"]) # double-stage a deleted path
- self.assertEqual([], list(r.open_index()))
-
- def test_stage_directory(self):
- r = self._repo
- os.mkdir(os.path.join(r.path, "c"))
- r.stage(["c"])
- self.assertEqual([b"a"], list(r.open_index()))
-
- def test_stage_submodule(self):
- r = self._repo
- s = Repo.init(os.path.join(r.path, "sub"), mkdir=True)
- s.do_commit(b"message")
- r.stage(["sub"])
- self.assertEqual([b"a", b"sub"], list(r.open_index()))
-
- def test_unstage_midify_file_with_dir(self):
- os.mkdir(os.path.join(self._repo.path, "new_dir"))
- full_path = os.path.join(self._repo.path, "new_dir", "foo")
-
- with open(full_path, "w") as f:
- f.write("hello")
- porcelain.add(self._repo, paths=[full_path])
- porcelain.commit(
- self._repo,
- message=b"unitest",
- committer=b"Jane <jane@example.com>",
- author=b"John <john@example.com>",
- )
- with open(full_path, "a") as f:
- f.write("something new")
- self._repo.unstage(["new_dir/foo"])
- status = list(porcelain.status(self._repo))
- self.assertEqual(
- [{"add": [], "delete": [], "modify": []}, [b"new_dir/foo"], []], status
- )
-
- def test_unstage_while_no_commit(self):
- file = "foo"
- full_path = os.path.join(self._repo.path, file)
- with open(full_path, "w") as f:
- f.write("hello")
- porcelain.add(self._repo, paths=[full_path])
- self._repo.unstage([file])
- status = list(porcelain.status(self._repo))
- self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["foo"]], status)
-
- def test_unstage_add_file(self):
- file = "foo"
- full_path = os.path.join(self._repo.path, file)
- porcelain.commit(
- self._repo,
- message=b"unitest",
- committer=b"Jane <jane@example.com>",
- author=b"John <john@example.com>",
- )
- with open(full_path, "w") as f:
- f.write("hello")
- porcelain.add(self._repo, paths=[full_path])
- self._repo.unstage([file])
- status = list(porcelain.status(self._repo))
- self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["foo"]], status)
-
- def test_unstage_modify_file(self):
- file = "foo"
- full_path = os.path.join(self._repo.path, file)
- with open(full_path, "w") as f:
- f.write("hello")
- porcelain.add(self._repo, paths=[full_path])
- porcelain.commit(
- self._repo,
- message=b"unitest",
- committer=b"Jane <jane@example.com>",
- author=b"John <john@example.com>",
- )
- with open(full_path, "a") as f:
- f.write("broken")
- porcelain.add(self._repo, paths=[full_path])
- self._repo.unstage([file])
- status = list(porcelain.status(self._repo))
-
- self.assertEqual(
- [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status
- )
-
- def test_unstage_remove_file(self):
- file = "foo"
- full_path = os.path.join(self._repo.path, file)
- with open(full_path, "w") as f:
- f.write("hello")
- porcelain.add(self._repo, paths=[full_path])
- porcelain.commit(
- self._repo,
- message=b"unitest",
- committer=b"Jane <jane@example.com>",
- author=b"John <john@example.com>",
- )
- os.remove(full_path)
- self._repo.unstage([file])
- status = list(porcelain.status(self._repo))
- self.assertEqual(
- [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status
- )
-
- def test_reset_index(self):
- r = self._repo
- with open(os.path.join(r.path, "a"), "wb") as f:
- f.write(b"changed")
- with open(os.path.join(r.path, "b"), "wb") as f:
- f.write(b"added")
- r.stage(["a", "b"])
- status = list(porcelain.status(self._repo))
- self.assertEqual(
- [{"add": [b"b"], "delete": [], "modify": [b"a"]}, [], []], status
- )
- r.reset_index()
- status = list(porcelain.status(self._repo))
- self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["b"]], status)
-
- @skipIf(
- sys.platform in ("win32", "darwin"),
- "tries to implicitly decode as utf8",
- )
- def test_commit_no_encode_decode(self):
- r = self._repo
- repo_path_bytes = os.fsencode(r.path)
- encodings = ("utf8", "latin1")
- names = ["À".encode(encoding) for encoding in encodings]
- for name, encoding in zip(names, encodings):
- full_path = os.path.join(repo_path_bytes, name)
- with open(full_path, "wb") as f:
- f.write(encoding.encode("ascii"))
- # These files are break tear_down_repo, so cleanup these files
- # ourselves.
- self.addCleanup(os.remove, full_path)
-
- r.stage(names)
- commit_sha = r.do_commit(
- b"Files with different encodings",
- committer=b"Test Committer <test@nodomain.com>",
- author=b"Test Author <test@nodomain.com>",
- commit_timestamp=12395,
- commit_timezone=0,
- author_timestamp=12395,
- author_timezone=0,
- ref=None,
- merge_heads=[self._root_commit],
- )
-
- for name, encoding in zip(names, encodings):
- mode, id = tree_lookup_path(r.get_object, r[commit_sha].tree, name)
- self.assertEqual(stat.S_IFREG | 0o644, mode)
- self.assertEqual(encoding.encode("ascii"), r[id].data)
-
- def test_discover_intended(self):
- path = os.path.join(self._repo_dir, "b/c")
- r = Repo.discover(path)
- self.assertEqual(r.head(), self._repo.head())
-
- def test_discover_isrepo(self):
- r = Repo.discover(self._repo_dir)
- self.assertEqual(r.head(), self._repo.head())
-
- def test_discover_notrepo(self):
- with self.assertRaises(NotGitRepository):
- Repo.discover("/")
-
-
-class CheckUserIdentityTests(TestCase):
- def test_valid(self):
- check_user_identity(b"Me <me@example.com>")
-
- def test_invalid(self):
- self.assertRaises(InvalidUserIdentity, check_user_identity, b"No Email")
- self.assertRaises(
- InvalidUserIdentity, check_user_identity, b"Fullname <missing"
- )
- self.assertRaises(
- InvalidUserIdentity, check_user_identity, b"Fullname missing>"
- )
- self.assertRaises(
- InvalidUserIdentity, check_user_identity, b"Fullname >order<>"
- )
- self.assertRaises(
- InvalidUserIdentity, check_user_identity, b"Contains\0null byte <>"
- )
- self.assertRaises(
- InvalidUserIdentity, check_user_identity, b"Contains\nnewline byte <>"
- )
blob - ccbe09029f8130ff0e94669869d831009bcbb906 (mode 644)
blob + /dev/null
--- dulwich/tests/test_server.py
+++ /dev/null
-# test_server.py -- Tests for the git server
-# Copyright (C) 2010 Google, Inc.
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for the smart protocol server."""
-
-import os
-import shutil
-import sys
-import tempfile
-from io import BytesIO
-from typing import Dict, List
-
-from dulwich.tests import TestCase
-
-from ..errors import (
- GitProtocolError,
- HangupException,
- NotGitRepository,
- UnexpectedCommandError,
-)
-from ..object_store import MemoryObjectStore
-from ..objects import Tree
-from ..protocol import ZERO_SHA, format_capability_line
-from ..repo import MemoryRepo, Repo
-from ..server import (
- Backend,
- DictBackend,
- FileSystemBackend,
- MultiAckDetailedGraphWalkerImpl,
- MultiAckGraphWalkerImpl,
- PackHandler,
- ReceivePackHandler,
- SingleAckGraphWalkerImpl,
- UploadPackHandler,
- _find_shallow,
- _ProtocolGraphWalker,
- _split_proto_line,
- serve_command,
- update_server_info,
-)
-from .utils import make_commit, make_tag
-
-ONE = b"1" * 40
-TWO = b"2" * 40
-THREE = b"3" * 40
-FOUR = b"4" * 40
-FIVE = b"5" * 40
-SIX = b"6" * 40
-
-
-class TestProto:
- def __init__(self) -> None:
- self._output: List[bytes] = []
- self._received: Dict[int, List[bytes]] = {0: [], 1: [], 2: [], 3: []}
-
- def set_output(self, output_lines):
- self._output = output_lines
-
- def read_pkt_line(self):
- if self._output:
- data = self._output.pop(0)
- if data is not None:
- return data.rstrip() + b"\n"
- else:
- # flush-pkt ('0000').
- return None
- else:
- raise HangupException
-
- def write_sideband(self, band, data):
- self._received[band].append(data)
-
- def write_pkt_line(self, data):
- self._received[0].append(data)
-
- def get_received_line(self, band=0):
- lines = self._received[band]
- return lines.pop(0)
-
-
-class TestGenericPackHandler(PackHandler):
- def __init__(self) -> None:
- PackHandler.__init__(self, Backend(), None)
-
- @classmethod
- def capabilities(cls):
- return [b"cap1", b"cap2", b"cap3"]
-
- @classmethod
- def required_capabilities(cls):
- return [b"cap2"]
-
-
-class HandlerTestCase(TestCase):
- def setUp(self):
- super().setUp()
- self._handler = TestGenericPackHandler()
-
- def assertSucceeds(self, func, *args, **kwargs):
- try:
- func(*args, **kwargs)
- except GitProtocolError as e:
- self.fail(e)
-
- def test_capability_line(self):
- self.assertEqual(
- b" cap1 cap2 cap3",
- format_capability_line([b"cap1", b"cap2", b"cap3"]),
- )
-
- def test_set_client_capabilities(self):
- set_caps = self._handler.set_client_capabilities
- self.assertSucceeds(set_caps, [b"cap2"])
- self.assertSucceeds(set_caps, [b"cap1", b"cap2"])
-
- # different order
- self.assertSucceeds(set_caps, [b"cap3", b"cap1", b"cap2"])
-
- # error cases
- self.assertRaises(GitProtocolError, set_caps, [b"capxxx", b"cap2"])
- self.assertRaises(GitProtocolError, set_caps, [b"cap1", b"cap3"])
-
- # ignore innocuous but unknown capabilities
- self.assertRaises(GitProtocolError, set_caps, [b"cap2", b"ignoreme"])
- self.assertNotIn(b"ignoreme", self._handler.capabilities())
- self._handler.innocuous_capabilities = lambda: (b"ignoreme",)
- self.assertSucceeds(set_caps, [b"cap2", b"ignoreme"])
-
- def test_has_capability(self):
- self.assertRaises(GitProtocolError, self._handler.has_capability, b"cap")
- caps = self._handler.capabilities()
- self._handler.set_client_capabilities(caps)
- for cap in caps:
- self.assertTrue(self._handler.has_capability(cap))
- self.assertFalse(self._handler.has_capability(b"capxxx"))
-
-
-class UploadPackHandlerTestCase(TestCase):
- def setUp(self):
- super().setUp()
- self.path = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.path)
- self.repo = Repo.init(self.path)
- self._repo = Repo.init_bare(self.path)
- backend = DictBackend({b"/": self._repo})
- self._handler = UploadPackHandler(
- backend, [b"/", b"host=lolcathost"], TestProto()
- )
-
- def test_progress(self):
- caps = self._handler.required_capabilities()
- self._handler.set_client_capabilities(caps)
- self._handler._start_pack_send_phase()
- self._handler.progress(b"first message")
- self._handler.progress(b"second message")
- self.assertEqual(b"first message", self._handler.proto.get_received_line(2))
- self.assertEqual(b"second message", self._handler.proto.get_received_line(2))
- self.assertRaises(IndexError, self._handler.proto.get_received_line, 2)
-
- def test_no_progress(self):
- caps = [*list(self._handler.required_capabilities()), b"no-progress"]
- self._handler.set_client_capabilities(caps)
- self._handler.progress(b"first message")
- self._handler.progress(b"second message")
- self.assertRaises(IndexError, self._handler.proto.get_received_line, 2)
-
- def test_get_tagged(self):
- refs = {
- b"refs/tags/tag1": ONE,
- b"refs/tags/tag2": TWO,
- b"refs/heads/master": FOUR, # not a tag, no peeled value
- }
- # repo needs to peel this object
- self._repo.object_store.add_object(make_commit(id=FOUR))
- for name, sha in refs.items():
- self._repo.refs[name] = sha
- peeled = {
- b"refs/tags/tag1": b"1234" * 10,
- b"refs/tags/tag2": b"5678" * 10,
- }
- self._repo.refs._peeled_refs = peeled
- self._repo.refs.add_packed_refs(refs)
-
- caps = [*list(self._handler.required_capabilities()), b"include-tag"]
- self._handler.set_client_capabilities(caps)
- self.assertEqual(
- {b"1234" * 10: ONE, b"5678" * 10: TWO},
- self._handler.get_tagged(refs, repo=self._repo),
- )
-
- # non-include-tag case
- caps = self._handler.required_capabilities()
- self._handler.set_client_capabilities(caps)
- self.assertEqual({}, self._handler.get_tagged(refs, repo=self._repo))
-
- def test_nothing_to_do_but_wants(self):
- # Just the fact that the client claims to want an object is enough
- # for sending a pack. Even if there turns out to be nothing.
- refs = {b"refs/tags/tag1": ONE}
- tree = Tree()
- self._repo.object_store.add_object(tree)
- self._repo.object_store.add_object(make_commit(id=ONE, tree=tree))
- for name, sha in refs.items():
- self._repo.refs[name] = sha
- self._handler.proto.set_output(
- [
- b"want " + ONE + b" side-band-64k thin-pack ofs-delta",
- None,
- b"have " + ONE,
- b"done",
- None,
- ]
- )
- self._handler.handle()
- # The server should always send a pack, even if it's empty.
- self.assertTrue(self._handler.proto.get_received_line(1).startswith(b"PACK"))
-
- def test_nothing_to_do_no_wants(self):
- # Don't send a pack if the client didn't ask for anything.
- refs = {b"refs/tags/tag1": ONE}
- tree = Tree()
- self._repo.object_store.add_object(tree)
- self._repo.object_store.add_object(make_commit(id=ONE, tree=tree))
- for ref, sha in refs.items():
- self._repo.refs[ref] = sha
- self._handler.proto.set_output([None])
- self._handler.handle()
- # The server should not send a pack, since the client didn't ask for
- # anything.
- self.assertEqual([], self._handler.proto._received[1])
-
-
-class FindShallowTests(TestCase):
- def setUp(self):
- super().setUp()
- self._store = MemoryObjectStore()
-
- def make_commit(self, **attrs):
- commit = make_commit(**attrs)
- self._store.add_object(commit)
- return commit
-
- def make_linear_commits(self, n, message=b""):
- commits = []
- parents = []
- for _ in range(n):
- commits.append(self.make_commit(parents=parents, message=message))
- parents = [commits[-1].id]
- return commits
-
- def assertSameElements(self, expected, actual):
- self.assertEqual(set(expected), set(actual))
-
- def test_linear(self):
- c1, c2, c3 = self.make_linear_commits(3)
-
- self.assertEqual(({c3.id}, set()), _find_shallow(self._store, [c3.id], 1))
- self.assertEqual(
- ({c2.id}, {c3.id}),
- _find_shallow(self._store, [c3.id], 2),
- )
- self.assertEqual(
- ({c1.id}, {c2.id, c3.id}),
- _find_shallow(self._store, [c3.id], 3),
- )
- self.assertEqual(
- (set(), {c1.id, c2.id, c3.id}),
- _find_shallow(self._store, [c3.id], 4),
- )
-
- def test_multiple_independent(self):
- a = self.make_linear_commits(2, message=b"a")
- b = self.make_linear_commits(2, message=b"b")
- c = self.make_linear_commits(2, message=b"c")
- heads = [a[1].id, b[1].id, c[1].id]
-
- self.assertEqual(
- ({a[0].id, b[0].id, c[0].id}, set(heads)),
- _find_shallow(self._store, heads, 2),
- )
-
- def test_multiple_overlapping(self):
- # Create the following commit tree:
- # 1--2
- # \
- # 3--4
- c1, c2 = self.make_linear_commits(2)
- c3 = self.make_commit(parents=[c1.id])
- c4 = self.make_commit(parents=[c3.id])
-
- # 1 is shallow along the path from 4, but not along the path from 2.
- self.assertEqual(
- ({c1.id}, {c1.id, c2.id, c3.id, c4.id}),
- _find_shallow(self._store, [c2.id, c4.id], 3),
- )
-
- def test_merge(self):
- c1 = self.make_commit()
- c2 = self.make_commit()
- c3 = self.make_commit(parents=[c1.id, c2.id])
-
- self.assertEqual(
- ({c1.id, c2.id}, {c3.id}),
- _find_shallow(self._store, [c3.id], 2),
- )
-
- def test_tag(self):
- c1, c2 = self.make_linear_commits(2)
- tag = make_tag(c2, name=b"tag")
- self._store.add_object(tag)
-
- self.assertEqual(
- ({c1.id}, {c2.id}),
- _find_shallow(self._store, [tag.id], 2),
- )
-
-
-class TestUploadPackHandler(UploadPackHandler):
- @classmethod
- def required_capabilities(self):
- return []
-
-
-class ReceivePackHandlerTestCase(TestCase):
- def setUp(self):
- super().setUp()
- self._repo = MemoryRepo.init_bare([], {})
- backend = DictBackend({b"/": self._repo})
- self._handler = ReceivePackHandler(
- backend, [b"/", b"host=lolcathost"], TestProto()
- )
-
- def test_apply_pack_del_ref(self):
- refs = {b"refs/heads/master": TWO, b"refs/heads/fake-branch": ONE}
- self._repo.refs._update(refs)
- update_refs = [
- [ONE, ZERO_SHA, b"refs/heads/fake-branch"],
- ]
- self._handler.set_client_capabilities([b"delete-refs"])
- status = self._handler._apply_pack(update_refs)
- self.assertEqual(status[0][0], b"unpack")
- self.assertEqual(status[0][1], b"ok")
- self.assertEqual(status[1][0], b"refs/heads/fake-branch")
- self.assertEqual(status[1][1], b"ok")
-
-
-class ProtocolGraphWalkerEmptyTestCase(TestCase):
- def setUp(self):
- super().setUp()
- self._repo = MemoryRepo.init_bare([], {})
- backend = DictBackend({b"/": self._repo})
- self._walker = _ProtocolGraphWalker(
- TestUploadPackHandler(backend, [b"/", b"host=lolcats"], TestProto()),
- self._repo.object_store,
- self._repo.get_peeled,
- self._repo.refs.get_symrefs,
- )
-
- def test_empty_repository(self):
- # The server should wait for a flush packet.
- self._walker.proto.set_output([])
- self.assertRaises(HangupException, self._walker.determine_wants, {})
- self.assertEqual(None, self._walker.proto.get_received_line())
-
- self._walker.proto.set_output([None])
- self.assertEqual([], self._walker.determine_wants({}))
- self.assertEqual(None, self._walker.proto.get_received_line())
-
-
-class ProtocolGraphWalkerTestCase(TestCase):
- def setUp(self):
- super().setUp()
- # Create the following commit tree:
- # 3---5
- # /
- # 1---2---4
- commits = [
- make_commit(id=ONE, parents=[], commit_time=111),
- make_commit(id=TWO, parents=[ONE], commit_time=222),
- make_commit(id=THREE, parents=[ONE], commit_time=333),
- make_commit(id=FOUR, parents=[TWO], commit_time=444),
- make_commit(id=FIVE, parents=[THREE], commit_time=555),
- ]
- self._repo = MemoryRepo.init_bare(commits, {})
- backend = DictBackend({b"/": self._repo})
- self._walker = _ProtocolGraphWalker(
- TestUploadPackHandler(backend, [b"/", b"host=lolcats"], TestProto()),
- self._repo.object_store,
- self._repo.get_peeled,
- self._repo.refs.get_symrefs,
- )
-
- def test_all_wants_satisfied_no_haves(self):
- self._walker.set_wants([ONE])
- self.assertFalse(self._walker.all_wants_satisfied([]))
- self._walker.set_wants([TWO])
- self.assertFalse(self._walker.all_wants_satisfied([]))
- self._walker.set_wants([THREE])
- self.assertFalse(self._walker.all_wants_satisfied([]))
-
- def test_all_wants_satisfied_have_root(self):
- self._walker.set_wants([ONE])
- self.assertTrue(self._walker.all_wants_satisfied([ONE]))
- self._walker.set_wants([TWO])
- self.assertTrue(self._walker.all_wants_satisfied([ONE]))
- self._walker.set_wants([THREE])
- self.assertTrue(self._walker.all_wants_satisfied([ONE]))
-
- def test_all_wants_satisfied_have_branch(self):
- self._walker.set_wants([TWO])
- self.assertTrue(self._walker.all_wants_satisfied([TWO]))
- # wrong branch
- self._walker.set_wants([THREE])
- self.assertFalse(self._walker.all_wants_satisfied([TWO]))
-
- def test_all_wants_satisfied(self):
- self._walker.set_wants([FOUR, FIVE])
- # trivial case: wants == haves
- self.assertTrue(self._walker.all_wants_satisfied([FOUR, FIVE]))
- # cases that require walking the commit tree
- self.assertTrue(self._walker.all_wants_satisfied([ONE]))
- self.assertFalse(self._walker.all_wants_satisfied([TWO]))
- self.assertFalse(self._walker.all_wants_satisfied([THREE]))
- self.assertTrue(self._walker.all_wants_satisfied([TWO, THREE]))
-
- def test_split_proto_line(self):
- allowed = (b"want", b"done", None)
- self.assertEqual(
- (b"want", ONE), _split_proto_line(b"want " + ONE + b"\n", allowed)
- )
- self.assertEqual(
- (b"want", TWO), _split_proto_line(b"want " + TWO + b"\n", allowed)
- )
- self.assertRaises(GitProtocolError, _split_proto_line, b"want xxxx\n", allowed)
- self.assertRaises(
- UnexpectedCommandError,
- _split_proto_line,
- b"have " + THREE + b"\n",
- allowed,
- )
- self.assertRaises(
- GitProtocolError,
- _split_proto_line,
- b"foo " + FOUR + b"\n",
- allowed,
- )
- self.assertRaises(GitProtocolError, _split_proto_line, b"bar", allowed)
- self.assertEqual((b"done", None), _split_proto_line(b"done\n", allowed))
- self.assertEqual((None, None), _split_proto_line(b"", allowed))
-
- def test_determine_wants(self):
- self._walker.proto.set_output([None])
- self.assertEqual([], self._walker.determine_wants({}))
- self.assertEqual(None, self._walker.proto.get_received_line())
-
- self._walker.proto.set_output(
- [
- b"want " + ONE + b" multi_ack",
- b"want " + TWO,
- None,
- ]
- )
- heads = {
- b"refs/heads/ref1": ONE,
- b"refs/heads/ref2": TWO,
- b"refs/heads/ref3": THREE,
- }
- self._repo.refs._update(heads)
- self.assertEqual([ONE, TWO], self._walker.determine_wants(heads))
-
- self._walker.advertise_refs = True
- self.assertEqual([], self._walker.determine_wants(heads))
- self._walker.advertise_refs = False
-
- self._walker.proto.set_output([b"want " + FOUR + b" multi_ack", None])
- self.assertRaises(GitProtocolError, self._walker.determine_wants, heads)
-
- self._walker.proto.set_output([None])
- self.assertEqual([], self._walker.determine_wants(heads))
-
- self._walker.proto.set_output([b"want " + ONE + b" multi_ack", b"foo", None])
- self.assertRaises(GitProtocolError, self._walker.determine_wants, heads)
-
- self._walker.proto.set_output([b"want " + FOUR + b" multi_ack", None])
- self.assertRaises(GitProtocolError, self._walker.determine_wants, heads)
-
- def test_determine_wants_advertisement(self):
- self._walker.proto.set_output([None])
- # advertise branch tips plus tag
- heads = {
- b"refs/heads/ref4": FOUR,
- b"refs/heads/ref5": FIVE,
- b"refs/heads/tag6": SIX,
- }
- self._repo.refs._update(heads)
- self._repo.refs._update_peeled(heads)
- self._repo.refs._update_peeled({b"refs/heads/tag6": FIVE})
- self._walker.determine_wants(heads)
- lines = []
- while True:
- line = self._walker.proto.get_received_line()
- if line is None:
- break
- # strip capabilities list if present
- if b"\x00" in line:
- line = line[: line.index(b"\x00")]
- lines.append(line.rstrip())
-
- self.assertEqual(
- [
- FOUR + b" refs/heads/ref4",
- FIVE + b" refs/heads/ref5",
- FIVE + b" refs/heads/tag6^{}",
- SIX + b" refs/heads/tag6",
- ],
- sorted(lines),
- )
-
- # ensure peeled tag was advertised immediately following tag
- for i, line in enumerate(lines):
- if line.endswith(b" refs/heads/tag6"):
- self.assertEqual(FIVE + b" refs/heads/tag6^{}", lines[i + 1])
-
- # TODO: test commit time cutoff
-
- def _handle_shallow_request(self, lines, heads):
- self._walker.proto.set_output([*lines, None])
- self._walker._handle_shallow_request(heads)
-
- def assertReceived(self, expected):
- self.assertEqual(
- expected, list(iter(self._walker.proto.get_received_line, None))
- )
-
- def test_handle_shallow_request_no_client_shallows(self):
- self._handle_shallow_request([b"deepen 2\n"], [FOUR, FIVE])
- self.assertEqual({TWO, THREE}, self._walker.shallow)
- self.assertReceived(
- [
- b"shallow " + TWO,
- b"shallow " + THREE,
- ]
- )
-
- def test_handle_shallow_request_no_new_shallows(self):
- lines = [
- b"shallow " + TWO + b"\n",
- b"shallow " + THREE + b"\n",
- b"deepen 2\n",
- ]
- self._handle_shallow_request(lines, [FOUR, FIVE])
- self.assertEqual({TWO, THREE}, self._walker.shallow)
- self.assertReceived([])
-
- def test_handle_shallow_request_unshallows(self):
- lines = [
- b"shallow " + TWO + b"\n",
- b"deepen 3\n",
- ]
- self._handle_shallow_request(lines, [FOUR, FIVE])
- self.assertEqual({ONE}, self._walker.shallow)
- self.assertReceived(
- [
- b"shallow " + ONE,
- b"unshallow " + TWO,
- # THREE is unshallow but was is not shallow in the client
- ]
- )
-
-
-class TestProtocolGraphWalker:
- def __init__(self) -> None:
- self.acks: List[bytes] = []
- self.lines: List[bytes] = []
- self.wants_satisified = False
- self.stateless_rpc = None
- self.advertise_refs = False
- self._impl = None
- self.done_required = True
- self.done_received = False
- self._empty = False
- self.pack_sent = False
-
- def read_proto_line(self, allowed):
- command, sha = self.lines.pop(0)
- if allowed is not None:
- assert command in allowed
- return command, sha
-
- def send_ack(self, sha, ack_type=b""):
- self.acks.append((sha, ack_type))
-
- def send_nak(self):
- self.acks.append((None, b"nak"))
-
- def all_wants_satisfied(self, haves):
- if haves:
- return self.wants_satisified
-
- def pop_ack(self):
- if not self.acks:
- return None
- return self.acks.pop(0)
-
- def handle_done(self):
- if not self._impl:
- return
- # Whether or not PACK is sent after is determined by this, so
- # record this value.
- self.pack_sent = self._impl.handle_done(self.done_required, self.done_received)
- return self.pack_sent
-
- def notify_done(self):
- self.done_received = True
-
-
-class AckGraphWalkerImplTestCase(TestCase):
- """Base setup and asserts for AckGraphWalker tests."""
-
- def setUp(self):
- super().setUp()
- self._walker = TestProtocolGraphWalker()
- self._walker.lines = [
- (b"have", TWO),
- (b"have", ONE),
- (b"have", THREE),
- (b"done", None),
- ]
- self._impl = self.impl_cls(self._walker)
- self._walker._impl = self._impl
-
- def assertNoAck(self):
- self.assertEqual(None, self._walker.pop_ack())
-
- def assertAcks(self, acks):
- for sha, ack_type in acks:
- self.assertEqual((sha, ack_type), self._walker.pop_ack())
- self.assertNoAck()
-
- def assertAck(self, sha, ack_type=b""):
- self.assertAcks([(sha, ack_type)])
-
- def assertNak(self):
- self.assertAck(None, b"nak")
-
- def assertNextEquals(self, sha):
- self.assertEqual(sha, next(self._impl))
-
- def assertNextEmpty(self):
- # This is necessary because of no-done - the assumption that it
- # it safe to immediately send out the final ACK is no longer
- # true but the test is still needed for it. TestProtocolWalker
- # does implement the handle_done which will determine whether
- # the final confirmation can be sent.
- self.assertRaises(IndexError, next, self._impl)
- self._walker.handle_done()
-
-
-class SingleAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
- impl_cls = SingleAckGraphWalkerImpl
-
- def test_single_ack(self):
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self._impl.ack(ONE)
- self.assertAck(ONE)
-
- self.assertNextEquals(THREE)
- self._impl.ack(THREE)
- self.assertNoAck()
-
- self.assertNextEquals(None)
- self.assertNoAck()
-
- def test_single_ack_flush(self):
- # same as ack test but ends with a flush-pkt instead of done
- self._walker.lines[-1] = (None, None)
-
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self._impl.ack(ONE)
- self.assertAck(ONE)
-
- self.assertNextEquals(THREE)
- self.assertNoAck()
-
- self.assertNextEquals(None)
- self.assertNoAck()
-
- def test_single_ack_nak(self):
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self.assertNoAck()
-
- self.assertNextEquals(THREE)
- self.assertNoAck()
-
- self.assertNextEquals(None)
- self.assertNextEmpty()
- self.assertNak()
-
- def test_single_ack_nak_flush(self):
- # same as nak test but ends with a flush-pkt instead of done
- self._walker.lines[-1] = (None, None)
-
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self.assertNoAck()
-
- self.assertNextEquals(THREE)
- self.assertNoAck()
-
- self.assertNextEquals(None)
- self.assertNextEmpty()
- self.assertNak()
-
-
-class MultiAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
- impl_cls = MultiAckGraphWalkerImpl
-
- def test_multi_ack(self):
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self._impl.ack(ONE)
- self.assertAck(ONE, b"continue")
-
- self.assertNextEquals(THREE)
- self._impl.ack(THREE)
- self.assertAck(THREE, b"continue")
-
- self.assertNextEquals(None)
- self.assertNextEmpty()
- self.assertAck(THREE)
-
- def test_multi_ack_partial(self):
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self._impl.ack(ONE)
- self.assertAck(ONE, b"continue")
-
- self.assertNextEquals(THREE)
- self.assertNoAck()
-
- self.assertNextEquals(None)
- self.assertNextEmpty()
- self.assertAck(ONE)
-
- def test_multi_ack_flush(self):
- self._walker.lines = [
- (b"have", TWO),
- (None, None),
- (b"have", ONE),
- (b"have", THREE),
- (b"done", None),
- ]
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self.assertNak() # nak the flush-pkt
-
- self._impl.ack(ONE)
- self.assertAck(ONE, b"continue")
-
- self.assertNextEquals(THREE)
- self._impl.ack(THREE)
- self.assertAck(THREE, b"continue")
-
- self.assertNextEquals(None)
- self.assertNextEmpty()
- self.assertAck(THREE)
-
- def test_multi_ack_nak(self):
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self.assertNoAck()
-
- self.assertNextEquals(THREE)
- self.assertNoAck()
-
- self.assertNextEquals(None)
- self.assertNextEmpty()
- self.assertNak()
-
-
-class MultiAckDetailedGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
- impl_cls = MultiAckDetailedGraphWalkerImpl
-
- def test_multi_ack(self):
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self._impl.ack(ONE)
- self.assertAck(ONE, b"common")
-
- self.assertNextEquals(THREE)
- self._impl.ack(THREE)
- self.assertAck(THREE, b"common")
-
- # done is read.
- self._walker.wants_satisified = True
- self.assertNextEquals(None)
- self._walker.lines.append((None, None))
- self.assertNextEmpty()
- self.assertAcks([(THREE, b"ready"), (None, b"nak"), (THREE, b"")])
- # PACK is sent
- self.assertTrue(self._walker.pack_sent)
-
- def test_multi_ack_nodone(self):
- self._walker.done_required = False
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self._impl.ack(ONE)
- self.assertAck(ONE, b"common")
-
- self.assertNextEquals(THREE)
- self._impl.ack(THREE)
- self.assertAck(THREE, b"common")
-
- # done is read.
- self._walker.wants_satisified = True
- self.assertNextEquals(None)
- self._walker.lines.append((None, None))
- self.assertNextEmpty()
- self.assertAcks([(THREE, b"ready"), (None, b"nak"), (THREE, b"")])
- # PACK is sent
- self.assertTrue(self._walker.pack_sent)
-
- def test_multi_ack_flush_end(self):
- # transmission ends with a flush-pkt without a done but no-done is
- # assumed.
- self._walker.lines[-1] = (None, None)
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self._impl.ack(ONE)
- self.assertAck(ONE, b"common")
-
- self.assertNextEquals(THREE)
- self._impl.ack(THREE)
- self.assertAck(THREE, b"common")
-
- # no done is read
- self._walker.wants_satisified = True
- self.assertNextEmpty()
- self.assertAcks([(THREE, b"ready"), (None, b"nak")])
- # PACK is NOT sent
- self.assertFalse(self._walker.pack_sent)
-
- def test_multi_ack_flush_end_nodone(self):
- # transmission ends with a flush-pkt without a done but no-done is
- # assumed.
- self._walker.lines[-1] = (None, None)
- self._walker.done_required = False
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self._impl.ack(ONE)
- self.assertAck(ONE, b"common")
-
- self.assertNextEquals(THREE)
- self._impl.ack(THREE)
- self.assertAck(THREE, b"common")
-
- # no done is read, but pretend it is (last 'ACK 'commit_id' '')
- self._walker.wants_satisified = True
- self.assertNextEmpty()
- self.assertAcks([(THREE, b"ready"), (None, b"nak"), (THREE, b"")])
- # PACK is sent
- self.assertTrue(self._walker.pack_sent)
-
- def test_multi_ack_partial(self):
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self._impl.ack(ONE)
- self.assertAck(ONE, b"common")
-
- self.assertNextEquals(THREE)
- self.assertNoAck()
-
- self.assertNextEquals(None)
- self.assertNextEmpty()
- self.assertAck(ONE)
-
- def test_multi_ack_flush(self):
- # same as ack test but contains a flush-pkt in the middle
- self._walker.lines = [
- (b"have", TWO),
- (None, None),
- (b"have", ONE),
- (b"have", THREE),
- (b"done", None),
- (None, None),
- ]
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self.assertNak() # nak the flush-pkt
-
- self._impl.ack(ONE)
- self.assertAck(ONE, b"common")
-
- self.assertNextEquals(THREE)
- self._impl.ack(THREE)
- self.assertAck(THREE, b"common")
-
- self._walker.wants_satisified = True
- self.assertNextEquals(None)
- self.assertNextEmpty()
- self.assertAcks([(THREE, b"ready"), (None, b"nak"), (THREE, b"")])
-
- def test_multi_ack_nak(self):
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self.assertNoAck()
-
- self.assertNextEquals(THREE)
- self.assertNoAck()
-
- # Done is sent here.
- self.assertNextEquals(None)
- self.assertNextEmpty()
- self.assertNak()
- self.assertNextEmpty()
- self.assertTrue(self._walker.pack_sent)
-
- def test_multi_ack_nak_nodone(self):
- self._walker.done_required = False
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self.assertNoAck()
-
- self.assertNextEquals(THREE)
- self.assertNoAck()
-
- # Done is sent here.
- self.assertFalse(self._walker.pack_sent)
- self.assertNextEquals(None)
- self.assertNextEmpty()
- self.assertTrue(self._walker.pack_sent)
- self.assertNak()
- self.assertNextEmpty()
-
- def test_multi_ack_nak_flush(self):
- # same as nak test but contains a flush-pkt in the middle
- self._walker.lines = [
- (b"have", TWO),
- (None, None),
- (b"have", ONE),
- (b"have", THREE),
- (b"done", None),
- ]
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self.assertNak()
-
- self.assertNextEquals(THREE)
- self.assertNoAck()
-
- self.assertNextEquals(None)
- self.assertNextEmpty()
- self.assertNak()
-
- def test_multi_ack_stateless(self):
- # transmission ends with a flush-pkt
- self._walker.lines[-1] = (None, None)
- self._walker.stateless_rpc = True
-
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self.assertNoAck()
-
- self.assertNextEquals(THREE)
- self.assertNoAck()
-
- self.assertFalse(self._walker.pack_sent)
- self.assertNextEquals(None)
- self.assertNak()
-
- self.assertNextEmpty()
- self.assertNoAck()
- self.assertFalse(self._walker.pack_sent)
-
- def test_multi_ack_stateless_nodone(self):
- self._walker.done_required = False
- # transmission ends with a flush-pkt
- self._walker.lines[-1] = (None, None)
- self._walker.stateless_rpc = True
-
- self.assertNextEquals(TWO)
- self.assertNoAck()
-
- self.assertNextEquals(ONE)
- self.assertNoAck()
-
- self.assertNextEquals(THREE)
- self.assertNoAck()
-
- self.assertFalse(self._walker.pack_sent)
- self.assertNextEquals(None)
- self.assertNak()
-
- self.assertNextEmpty()
- self.assertNoAck()
- # PACK will still not be sent.
- self.assertFalse(self._walker.pack_sent)
-
-
-class FileSystemBackendTests(TestCase):
- """Tests for FileSystemBackend."""
-
- def setUp(self):
- super().setUp()
- self.path = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.path)
- self.repo = Repo.init(self.path)
- if sys.platform == "win32":
- self.backend = FileSystemBackend(self.path[0] + ":" + os.sep)
- else:
- self.backend = FileSystemBackend()
-
- def test_nonexistant(self):
- self.assertRaises(
- NotGitRepository,
- self.backend.open_repository,
- "/does/not/exist/unless/foo",
- )
-
- def test_absolute(self):
- repo = self.backend.open_repository(self.path)
- self.assertTrue(
- os.path.samefile(
- os.path.abspath(repo.path), os.path.abspath(self.repo.path)
- )
- )
-
- def test_child(self):
- self.assertRaises(
- NotGitRepository,
- self.backend.open_repository,
- os.path.join(self.path, "foo"),
- )
-
- def test_bad_repo_path(self):
- backend = FileSystemBackend()
-
- self.assertRaises(NotGitRepository, lambda: backend.open_repository("/ups"))
-
-
-class DictBackendTests(TestCase):
- """Tests for DictBackend."""
-
- def test_nonexistant(self):
- repo = MemoryRepo.init_bare([], {})
- backend = DictBackend({b"/": repo})
- self.assertRaises(
- NotGitRepository,
- backend.open_repository,
- "/does/not/exist/unless/foo",
- )
-
- def test_bad_repo_path(self):
- repo = MemoryRepo.init_bare([], {})
- backend = DictBackend({b"/": repo})
-
- self.assertRaises(NotGitRepository, lambda: backend.open_repository("/ups"))
-
-
-class ServeCommandTests(TestCase):
- """Tests for serve_command."""
-
- def setUp(self):
- super().setUp()
- self.backend = DictBackend({})
-
- def serve_command(self, handler_cls, args, inf, outf):
- return serve_command(
- handler_cls,
- [b"test", *args],
- backend=self.backend,
- inf=inf,
- outf=outf,
- )
-
- def test_receive_pack(self):
- commit = make_commit(id=ONE, parents=[], commit_time=111)
- self.backend.repos[b"/"] = MemoryRepo.init_bare(
- [commit], {b"refs/heads/master": commit.id}
- )
- outf = BytesIO()
- exitcode = self.serve_command(
- ReceivePackHandler, [b"/"], BytesIO(b"0000"), outf
- )
- outlines = outf.getvalue().splitlines()
- self.assertEqual(2, len(outlines))
- self.assertEqual(
- b"1111111111111111111111111111111111111111 refs/heads/master",
- outlines[0][4:].split(b"\x00")[0],
- )
- self.assertEqual(b"0000", outlines[-1])
- self.assertEqual(0, exitcode)
-
-
-class UpdateServerInfoTests(TestCase):
- """Tests for update_server_info."""
-
- def setUp(self):
- super().setUp()
- self.path = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.path)
- self.repo = Repo.init(self.path)
-
- def test_empty(self):
- update_server_info(self.repo)
- with open(os.path.join(self.path, ".git", "info", "refs"), "rb") as f:
- self.assertEqual(b"", f.read())
- p = os.path.join(self.path, ".git", "objects", "info", "packs")
- with open(p, "rb") as f:
- self.assertEqual(b"", f.read())
-
- def test_simple(self):
- commit_id = self.repo.do_commit(
- message=b"foo",
- committer=b"Joe Example <joe@example.com>",
- ref=b"refs/heads/foo",
- )
- update_server_info(self.repo)
- with open(os.path.join(self.path, ".git", "info", "refs"), "rb") as f:
- self.assertEqual(f.read(), commit_id + b"\trefs/heads/foo\n")
- p = os.path.join(self.path, ".git", "objects", "info", "packs")
- with open(p, "rb") as f:
- self.assertEqual(f.read(), b"")
blob - d1e18db22b9445502cc682d3b4734d930069d27f (mode 644)
blob + /dev/null
--- dulwich/tests/test_stash.py
+++ /dev/null
-# test_stash.py -- tests for stash
-# Copyright (C) 2018 Jelmer Vernooij <jelmer@jelmer.uk>
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for stashes."""
-
-from ..repo import MemoryRepo
-from ..stash import Stash
-from . import TestCase
-
-
-class StashTests(TestCase):
- """Tests for stash."""
-
- def test_obtain(self):
- repo = MemoryRepo()
- stash = Stash.from_repo(repo)
- self.assertIsInstance(stash, Stash)
blob - 5760ef5ce567277a61a22776957cb93b3e897e2b (mode 644)
blob + /dev/null
--- dulwich/tests/test_utils.py
+++ /dev/null
-# test_utils.py -- Tests for git test utilities.
-# Copyright (C) 2010 Google, Inc.
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for git test utilities."""
-
-from dulwich.tests import TestCase
-
-from ..object_store import MemoryObjectStore
-from ..objects import Blob
-from .utils import build_commit_graph, make_object
-
-
-class BuildCommitGraphTest(TestCase):
- def setUp(self):
- super().setUp()
- self.store = MemoryObjectStore()
-
- def test_linear(self):
- c1, c2 = build_commit_graph(self.store, [[1], [2, 1]])
- for obj_id in [c1.id, c2.id, c1.tree, c2.tree]:
- self.assertIn(obj_id, self.store)
- self.assertEqual([], c1.parents)
- self.assertEqual([c1.id], c2.parents)
- self.assertEqual(c1.tree, c2.tree)
- self.assertEqual([], self.store[c1.tree].items())
- self.assertGreater(c2.commit_time, c1.commit_time)
-
- def test_merge(self):
- c1, c2, c3, c4 = build_commit_graph(
- self.store, [[1], [2, 1], [3, 1], [4, 2, 3]]
- )
- self.assertEqual([c2.id, c3.id], c4.parents)
- self.assertGreater(c4.commit_time, c2.commit_time)
- self.assertGreater(c4.commit_time, c3.commit_time)
-
- def test_missing_parent(self):
- self.assertRaises(
- ValueError, build_commit_graph, self.store, [[1], [3, 2], [2, 1]]
- )
-
- def test_trees(self):
- a1 = make_object(Blob, data=b"aaa1")
- a2 = make_object(Blob, data=b"aaa2")
- c1, c2 = build_commit_graph(
- self.store,
- [[1], [2, 1]],
- trees={1: [(b"a", a1)], 2: [(b"a", a2, 0o100644)]},
- )
- self.assertEqual((0o100644, a1.id), self.store[c1.tree][b"a"])
- self.assertEqual((0o100644, a2.id), self.store[c2.tree][b"a"])
-
- def test_attrs(self):
- c1, c2 = build_commit_graph(
- self.store, [[1], [2, 1]], attrs={1: {"message": b"Hooray!"}}
- )
- self.assertEqual(b"Hooray!", c1.message)
- self.assertEqual(b"Commit 2", c2.message)
-
- def test_commit_time(self):
- c1, c2, c3 = build_commit_graph(
- self.store,
- [[1], [2, 1], [3, 2]],
- attrs={1: {"commit_time": 124}, 2: {"commit_time": 123}},
- )
- self.assertEqual(124, c1.commit_time)
- self.assertEqual(123, c2.commit_time)
- self.assertTrue(c2.commit_time < c1.commit_time < c3.commit_time)
blob - 6acf80b9a12bef3db12e32ce9a7b96e0fc3966a0 (mode 644)
blob + /dev/null
--- dulwich/tests/test_walk.py
+++ /dev/null
-# test_walk.py -- Tests for commit walking functionality.
-# Copyright (C) 2010 Google, Inc.
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for commit walking functionality."""
-
-from itertools import permutations
-from unittest import expectedFailure
-
-from dulwich.tests import TestCase
-
-from ..diff_tree import CHANGE_MODIFY, CHANGE_RENAME, RenameDetector, TreeChange
-from ..errors import MissingCommitError
-from ..object_store import MemoryObjectStore
-from ..objects import Blob, Commit
-from ..walk import ORDER_TOPO, WalkEntry, Walker, _topo_reorder
-from .utils import F, build_commit_graph, make_object, make_tag
-
-
-class TestWalkEntry:
- def __init__(self, commit, changes) -> None:
- self.commit = commit
- self.changes = changes
-
- def __repr__(self) -> str:
- return f"<TestWalkEntry commit={self.commit.id}, changes={self.changes!r}>"
-
- def __eq__(self, other):
- if not isinstance(other, WalkEntry) or self.commit != other.commit:
- return False
- if self.changes is None:
- return True
- return self.changes == other.changes()
-
-
-class WalkerTest(TestCase):
- def setUp(self):
- super().setUp()
- self.store = MemoryObjectStore()
-
- def make_commits(self, commit_spec, **kwargs):
- times = kwargs.pop("times", [])
- attrs = kwargs.pop("attrs", {})
- for i, t in enumerate(times):
- attrs.setdefault(i + 1, {})["commit_time"] = t
- return build_commit_graph(self.store, commit_spec, attrs=attrs, **kwargs)
-
- def make_linear_commits(self, num_commits, **kwargs):
- commit_spec = []
- for i in range(1, num_commits + 1):
- c = [i]
- if i > 1:
- c.append(i - 1)
- commit_spec.append(c)
- return self.make_commits(commit_spec, **kwargs)
-
- def assertWalkYields(self, expected, *args, **kwargs):
- walker = Walker(self.store, *args, **kwargs)
- expected = list(expected)
- for i, entry in enumerate(expected):
- if isinstance(entry, Commit):
- expected[i] = TestWalkEntry(entry, None)
- actual = list(walker)
- self.assertEqual(expected, actual)
-
- def test_tag(self):
- c1, c2, c3 = self.make_linear_commits(3)
- t2 = make_tag(target=c2)
- self.store.add_object(t2)
- self.assertWalkYields([c2, c1], [t2.id])
-
- def test_linear(self):
- c1, c2, c3 = self.make_linear_commits(3)
- self.assertWalkYields([c1], [c1.id])
- self.assertWalkYields([c2, c1], [c2.id])
- self.assertWalkYields([c3, c2, c1], [c3.id])
- self.assertWalkYields([c3, c2, c1], [c3.id, c1.id])
- self.assertWalkYields([c3, c2], [c3.id], exclude=[c1.id])
- self.assertWalkYields([c3, c2], [c3.id, c1.id], exclude=[c1.id])
- self.assertWalkYields([c3], [c3.id, c1.id], exclude=[c2.id])
-
- def test_missing(self):
- cs = list(reversed(self.make_linear_commits(20)))
- self.assertWalkYields(cs, [cs[0].id])
-
- # Exactly how close we can get to a missing commit depends on our
- # implementation (in particular the choice of _MAX_EXTRA_COMMITS), but
- # we should at least be able to walk some history in a broken repo.
- del self.store[cs[-1].id]
- for i in range(1, 11):
- self.assertWalkYields(cs[:i], [cs[0].id], max_entries=i)
- self.assertRaises(MissingCommitError, Walker, self.store, [cs[-1].id])
-
- def test_branch(self):
- c1, x2, x3, y4 = self.make_commits([[1], [2, 1], [3, 2], [4, 1]])
- self.assertWalkYields([x3, x2, c1], [x3.id])
- self.assertWalkYields([y4, c1], [y4.id])
- self.assertWalkYields([y4, x2, c1], [y4.id, x2.id])
- self.assertWalkYields([y4, x2], [y4.id, x2.id], exclude=[c1.id])
- self.assertWalkYields([y4, x3], [y4.id, x3.id], exclude=[x2.id])
- self.assertWalkYields([y4], [y4.id], exclude=[x3.id])
- self.assertWalkYields([x3, x2], [x3.id], exclude=[y4.id])
-
- def test_merge(self):
- c1, c2, c3, c4 = self.make_commits([[1], [2, 1], [3, 1], [4, 2, 3]])
- self.assertWalkYields([c4, c3, c2, c1], [c4.id])
- self.assertWalkYields([c3, c1], [c3.id])
- self.assertWalkYields([c2, c1], [c2.id])
- self.assertWalkYields([c4, c3], [c4.id], exclude=[c2.id])
- self.assertWalkYields([c4, c2], [c4.id], exclude=[c3.id])
-
- def test_merge_of_new_branch_from_old_base(self):
- # The commit on the branch was made at a time after any of the
- # commits on master, but the branch was from an older commit.
- # See also test_merge_of_old_branch
- self.maxDiff = None
- c1, c2, c3, c4, c5 = self.make_commits(
- [[1], [2, 1], [3, 2], [4, 1], [5, 3, 4]],
- times=[1, 2, 3, 4, 5],
- )
- self.assertWalkYields([c5, c4, c3, c2, c1], [c5.id])
- self.assertWalkYields([c3, c2, c1], [c3.id])
- self.assertWalkYields([c2, c1], [c2.id])
-
- @expectedFailure
- def test_merge_of_old_branch(self):
- # The commit on the branch was made at a time before any of
- # the commits on master, but it was merged into master after
- # those commits.
- # See also test_merge_of_new_branch_from_old_base
- self.maxDiff = None
- c1, c2, c3, c4, c5 = self.make_commits(
- [[1], [2, 1], [3, 2], [4, 1], [5, 3, 4]],
- times=[1, 3, 4, 2, 5],
- )
- self.assertWalkYields([c5, c4, c3, c2, c1], [c5.id])
- self.assertWalkYields([c3, c2, c1], [c3.id])
- self.assertWalkYields([c2, c1], [c2.id])
-
- def test_reverse(self):
- c1, c2, c3 = self.make_linear_commits(3)
- self.assertWalkYields([c1, c2, c3], [c3.id], reverse=True)
-
- def test_max_entries(self):
- c1, c2, c3 = self.make_linear_commits(3)
- self.assertWalkYields([c3, c2, c1], [c3.id], max_entries=3)
- self.assertWalkYields([c3, c2], [c3.id], max_entries=2)
- self.assertWalkYields([c3], [c3.id], max_entries=1)
-
- def test_reverse_after_max_entries(self):
- c1, c2, c3 = self.make_linear_commits(3)
- self.assertWalkYields([c1, c2, c3], [c3.id], max_entries=3, reverse=True)
- self.assertWalkYields([c2, c3], [c3.id], max_entries=2, reverse=True)
- self.assertWalkYields([c3], [c3.id], max_entries=1, reverse=True)
-
- def test_changes_one_parent(self):
- blob_a1 = make_object(Blob, data=b"a1")
- blob_a2 = make_object(Blob, data=b"a2")
- blob_b2 = make_object(Blob, data=b"b2")
- c1, c2 = self.make_linear_commits(
- 2,
- trees={
- 1: [(b"a", blob_a1)],
- 2: [(b"a", blob_a2), (b"b", blob_b2)],
- },
- )
- e1 = TestWalkEntry(c1, [TreeChange.add((b"a", F, blob_a1.id))])
- e2 = TestWalkEntry(
- c2,
- [
- TreeChange(CHANGE_MODIFY, (b"a", F, blob_a1.id), (b"a", F, blob_a2.id)),
- TreeChange.add((b"b", F, blob_b2.id)),
- ],
- )
- self.assertWalkYields([e2, e1], [c2.id])
-
- def test_changes_multiple_parents(self):
- blob_a1 = make_object(Blob, data=b"a1")
- blob_b2 = make_object(Blob, data=b"b2")
- blob_a3 = make_object(Blob, data=b"a3")
- c1, c2, c3 = self.make_commits(
- [[1], [2], [3, 1, 2]],
- trees={
- 1: [(b"a", blob_a1)],
- 2: [(b"b", blob_b2)],
- 3: [(b"a", blob_a3), (b"b", blob_b2)],
- },
- )
- # a is a modify/add conflict and b is not conflicted.
- changes = [
- [
- TreeChange(CHANGE_MODIFY, (b"a", F, blob_a1.id), (b"a", F, blob_a3.id)),
- TreeChange.add((b"a", F, blob_a3.id)),
- ]
- ]
- self.assertWalkYields(
- [TestWalkEntry(c3, changes)], [c3.id], exclude=[c1.id, c2.id]
- )
-
- def test_path_matches(self):
- walker = Walker(None, [], paths=[b"foo", b"bar", b"baz/quux"])
- self.assertTrue(walker._path_matches(b"foo"))
- self.assertTrue(walker._path_matches(b"foo/a"))
- self.assertTrue(walker._path_matches(b"foo/a/b"))
- self.assertTrue(walker._path_matches(b"bar"))
- self.assertTrue(walker._path_matches(b"baz/quux"))
- self.assertTrue(walker._path_matches(b"baz/quux/a"))
-
- self.assertFalse(walker._path_matches(None))
- self.assertFalse(walker._path_matches(b"oops"))
- self.assertFalse(walker._path_matches(b"fool"))
- self.assertFalse(walker._path_matches(b"baz"))
- self.assertFalse(walker._path_matches(b"baz/quu"))
-
- def test_paths(self):
- blob_a1 = make_object(Blob, data=b"a1")
- blob_b2 = make_object(Blob, data=b"b2")
- blob_a3 = make_object(Blob, data=b"a3")
- blob_b3 = make_object(Blob, data=b"b3")
- c1, c2, c3 = self.make_linear_commits(
- 3,
- trees={
- 1: [(b"a", blob_a1)],
- 2: [(b"a", blob_a1), (b"x/b", blob_b2)],
- 3: [(b"a", blob_a3), (b"x/b", blob_b3)],
- },
- )
-
- self.assertWalkYields([c3, c2, c1], [c3.id])
- self.assertWalkYields([c3, c1], [c3.id], paths=[b"a"])
- self.assertWalkYields([c3, c2], [c3.id], paths=[b"x/b"])
-
- # All changes are included, not just for requested paths.
- changes = [
- TreeChange(CHANGE_MODIFY, (b"a", F, blob_a1.id), (b"a", F, blob_a3.id)),
- TreeChange(CHANGE_MODIFY, (b"x/b", F, blob_b2.id), (b"x/b", F, blob_b3.id)),
- ]
- self.assertWalkYields(
- [TestWalkEntry(c3, changes)], [c3.id], max_entries=1, paths=[b"a"]
- )
-
- def test_paths_subtree(self):
- blob_a = make_object(Blob, data=b"a")
- blob_b = make_object(Blob, data=b"b")
- c1, c2, c3 = self.make_linear_commits(
- 3,
- trees={
- 1: [(b"x/a", blob_a)],
- 2: [(b"b", blob_b), (b"x/a", blob_a)],
- 3: [(b"b", blob_b), (b"x/a", blob_a), (b"x/b", blob_b)],
- },
- )
- self.assertWalkYields([c2], [c3.id], paths=[b"b"])
- self.assertWalkYields([c3, c1], [c3.id], paths=[b"x"])
-
- def test_paths_max_entries(self):
- blob_a = make_object(Blob, data=b"a")
- blob_b = make_object(Blob, data=b"b")
- c1, c2 = self.make_linear_commits(
- 2, trees={1: [(b"a", blob_a)], 2: [(b"a", blob_a), (b"b", blob_b)]}
- )
- self.assertWalkYields([c2], [c2.id], paths=[b"b"], max_entries=1)
- self.assertWalkYields([c1], [c1.id], paths=[b"a"], max_entries=1)
-
- def test_paths_merge(self):
- blob_a1 = make_object(Blob, data=b"a1")
- blob_a2 = make_object(Blob, data=b"a2")
- blob_a3 = make_object(Blob, data=b"a3")
- x1, y2, m3, m4 = self.make_commits(
- [[1], [2], [3, 1, 2], [4, 1, 2]],
- trees={
- 1: [(b"a", blob_a1)],
- 2: [(b"a", blob_a2)],
- 3: [(b"a", blob_a3)],
- 4: [(b"a", blob_a1)],
- },
- ) # Non-conflicting
- self.assertWalkYields([m3, y2, x1], [m3.id], paths=[b"a"])
- self.assertWalkYields([y2, x1], [m4.id], paths=[b"a"])
-
- def test_changes_with_renames(self):
- blob = make_object(Blob, data=b"blob")
- c1, c2 = self.make_linear_commits(
- 2, trees={1: [(b"a", blob)], 2: [(b"b", blob)]}
- )
- entry_a = (b"a", F, blob.id)
- entry_b = (b"b", F, blob.id)
- changes_without_renames = [
- TreeChange.delete(entry_a),
- TreeChange.add(entry_b),
- ]
- changes_with_renames = [TreeChange(CHANGE_RENAME, entry_a, entry_b)]
- self.assertWalkYields(
- [TestWalkEntry(c2, changes_without_renames)],
- [c2.id],
- max_entries=1,
- )
- detector = RenameDetector(self.store)
- self.assertWalkYields(
- [TestWalkEntry(c2, changes_with_renames)],
- [c2.id],
- max_entries=1,
- rename_detector=detector,
- )
-
- def test_follow_rename(self):
- blob = make_object(Blob, data=b"blob")
- names = [b"a", b"a", b"b", b"b", b"c", b"c"]
-
- trees = {i + 1: [(n, blob, F)] for i, n in enumerate(names)}
- c1, c2, c3, c4, c5, c6 = self.make_linear_commits(6, trees=trees)
- self.assertWalkYields([c5], [c6.id], paths=[b"c"])
-
- def e(n):
- return (n, F, blob.id)
-
- self.assertWalkYields(
- [
- TestWalkEntry(c5, [TreeChange(CHANGE_RENAME, e(b"b"), e(b"c"))]),
- TestWalkEntry(c3, [TreeChange(CHANGE_RENAME, e(b"a"), e(b"b"))]),
- TestWalkEntry(c1, [TreeChange.add(e(b"a"))]),
- ],
- [c6.id],
- paths=[b"c"],
- follow=True,
- )
-
- def test_follow_rename_remove_path(self):
- blob = make_object(Blob, data=b"blob")
- _, _, _, c4, c5, c6 = self.make_linear_commits(
- 6,
- trees={
- 1: [(b"a", blob), (b"c", blob)],
- 2: [],
- 3: [],
- 4: [(b"b", blob)],
- 5: [(b"a", blob)],
- 6: [(b"c", blob)],
- },
- )
-
- def e(n):
- return (n, F, blob.id)
-
- # Once the path changes to b, we aren't interested in a or c anymore.
- self.assertWalkYields(
- [
- TestWalkEntry(c6, [TreeChange(CHANGE_RENAME, e(b"a"), e(b"c"))]),
- TestWalkEntry(c5, [TreeChange(CHANGE_RENAME, e(b"b"), e(b"a"))]),
- TestWalkEntry(c4, [TreeChange.add(e(b"b"))]),
- ],
- [c6.id],
- paths=[b"c"],
- follow=True,
- )
-
- def test_since(self):
- c1, c2, c3 = self.make_linear_commits(3)
- self.assertWalkYields([c3, c2, c1], [c3.id], since=-1)
- self.assertWalkYields([c3, c2, c1], [c3.id], since=0)
- self.assertWalkYields([c3, c2], [c3.id], since=1)
- self.assertWalkYields([c3, c2], [c3.id], since=99)
- self.assertWalkYields([c3, c2], [c3.id], since=100)
- self.assertWalkYields([c3], [c3.id], since=101)
- self.assertWalkYields([c3], [c3.id], since=199)
- self.assertWalkYields([c3], [c3.id], since=200)
- self.assertWalkYields([], [c3.id], since=201)
- self.assertWalkYields([], [c3.id], since=300)
-
- def test_until(self):
- c1, c2, c3 = self.make_linear_commits(3)
- self.assertWalkYields([], [c3.id], until=-1)
- self.assertWalkYields([c1], [c3.id], until=0)
- self.assertWalkYields([c1], [c3.id], until=1)
- self.assertWalkYields([c1], [c3.id], until=99)
- self.assertWalkYields([c2, c1], [c3.id], until=100)
- self.assertWalkYields([c2, c1], [c3.id], until=101)
- self.assertWalkYields([c2, c1], [c3.id], until=199)
- self.assertWalkYields([c3, c2, c1], [c3.id], until=200)
- self.assertWalkYields([c3, c2, c1], [c3.id], until=201)
- self.assertWalkYields([c3, c2, c1], [c3.id], until=300)
-
- def test_since_until(self):
- c1, c2, c3 = self.make_linear_commits(3)
- self.assertWalkYields([], [c3.id], since=100, until=99)
- self.assertWalkYields([c3, c2, c1], [c3.id], since=-1, until=201)
- self.assertWalkYields([c2], [c3.id], since=100, until=100)
- self.assertWalkYields([c2], [c3.id], since=50, until=150)
-
- def test_since_over_scan(self):
- commits = self.make_linear_commits(11, times=[9, 0, 1, 2, 3, 4, 5, 8, 6, 7, 9])
- c8, _, c10, c11 = commits[-4:]
- del self.store[commits[0].id]
- # c9 is older than we want to walk, but is out of order with its
- # parent, so we need to walk past it to get to c8.
- # c1 would also match, but we've deleted it, and it should get pruned
- # even with over-scanning.
- self.assertWalkYields([c11, c10, c8], [c11.id], since=7)
-
- def assertTopoOrderEqual(self, expected_commits, commits):
- entries = [TestWalkEntry(c, None) for c in commits]
- actual_ids = [e.commit.id for e in list(_topo_reorder(entries))]
- self.assertEqual([c.id for c in expected_commits], actual_ids)
-
- def test_topo_reorder_linear(self):
- commits = self.make_linear_commits(5)
- commits.reverse()
- for perm in permutations(commits):
- self.assertTopoOrderEqual(commits, perm)
-
- def test_topo_reorder_multiple_parents(self):
- c1, c2, c3 = self.make_commits([[1], [2], [3, 1, 2]])
- # Already sorted, so totally FIFO.
- self.assertTopoOrderEqual([c3, c2, c1], [c3, c2, c1])
- self.assertTopoOrderEqual([c3, c1, c2], [c3, c1, c2])
-
- # c3 causes one parent to be yielded.
- self.assertTopoOrderEqual([c3, c2, c1], [c2, c3, c1])
- self.assertTopoOrderEqual([c3, c1, c2], [c1, c3, c2])
-
- # c3 causes both parents to be yielded.
- self.assertTopoOrderEqual([c3, c2, c1], [c1, c2, c3])
- self.assertTopoOrderEqual([c3, c2, c1], [c2, c1, c3])
-
- def test_topo_reorder_multiple_children(self):
- c1, c2, c3 = self.make_commits([[1], [2, 1], [3, 1]])
-
- # c2 and c3 are FIFO but c1 moves to the end.
- self.assertTopoOrderEqual([c3, c2, c1], [c3, c2, c1])
- self.assertTopoOrderEqual([c3, c2, c1], [c3, c1, c2])
- self.assertTopoOrderEqual([c3, c2, c1], [c1, c3, c2])
-
- self.assertTopoOrderEqual([c2, c3, c1], [c2, c3, c1])
- self.assertTopoOrderEqual([c2, c3, c1], [c2, c1, c3])
- self.assertTopoOrderEqual([c2, c3, c1], [c1, c2, c3])
-
- def test_out_of_order_children(self):
- c1, c2, c3, c4, c5 = self.make_commits(
- [[1], [2, 1], [3, 2], [4, 1], [5, 3, 4]], times=[2, 1, 3, 4, 5]
- )
- self.assertWalkYields([c5, c4, c3, c1, c2], [c5.id])
- self.assertWalkYields([c5, c4, c3, c2, c1], [c5.id], order=ORDER_TOPO)
-
- def test_out_of_order_with_exclude(self):
- # Create the following graph:
- # c1-------x2---m6
- # \ /
- # \-y3--y4-/--y5
- # Due to skew, y5 is the oldest commit.
- c1, x2, y3, y4, y5, m6 = self.make_commits(
- [[1], [2, 1], [3, 1], [4, 3], [5, 4], [6, 2, 4]],
- times=[2, 3, 4, 5, 1, 6],
- )
- self.assertWalkYields([m6, y4, y3, x2, c1], [m6.id])
- # Ensure that c1..y4 get excluded even though they're popped from the
- # priority queue long before y5.
- self.assertWalkYields([m6, x2], [m6.id], exclude=[y5.id])
-
- def test_empty_walk(self):
- c1, c2, c3 = self.make_linear_commits(3)
- self.assertWalkYields([], [c3.id], exclude=[c3.id])
-
-
-class WalkEntryTest(TestCase):
- def setUp(self):
- super().setUp()
- self.store = MemoryObjectStore()
-
- def make_commits(self, commit_spec, **kwargs):
- times = kwargs.pop("times", [])
- attrs = kwargs.pop("attrs", {})
- for i, t in enumerate(times):
- attrs.setdefault(i + 1, {})["commit_time"] = t
- return build_commit_graph(self.store, commit_spec, attrs=attrs, **kwargs)
-
- def make_linear_commits(self, num_commits, **kwargs):
- commit_spec = []
- for i in range(1, num_commits + 1):
- c = [i]
- if i > 1:
- c.append(i - 1)
- commit_spec.append(c)
- return self.make_commits(commit_spec, **kwargs)
-
- def test_all_changes(self):
- # Construct a commit with 2 files in different subdirectories.
- blob_a = make_object(Blob, data=b"a")
- blob_b = make_object(Blob, data=b"b")
- c1 = self.make_linear_commits(
- 1,
- trees={1: [(b"x/a", blob_a), (b"y/b", blob_b)]},
- )[0]
-
- # Get the WalkEntry for the commit.
- walker = Walker(self.store, c1.id)
- walker_entry = next(iter(walker))
- changes = walker_entry.changes()
-
- # Compare the changes with the expected values.
- entry_a = (b"x/a", F, blob_a.id)
- entry_b = (b"y/b", F, blob_b.id)
- self.assertEqual(
- [TreeChange.add(entry_a), TreeChange.add(entry_b)],
- changes,
- )
-
- def test_all_with_merge(self):
- blob_a = make_object(Blob, data=b"a")
- blob_a2 = make_object(Blob, data=b"a2")
- blob_b = make_object(Blob, data=b"b")
- blob_b2 = make_object(Blob, data=b"b2")
- x1, y2, m3 = self.make_commits(
- [[1], [2], [3, 1, 2]],
- trees={
- 1: [(b"x/a", blob_a)],
- 2: [(b"y/b", blob_b)],
- 3: [(b"x/a", blob_a2), (b"y/b", blob_b2)],
- },
- )
-
- # Get the WalkEntry for the merge commit.
- walker = Walker(self.store, m3.id)
- entries = list(walker)
- walker_entry = entries[0]
- self.assertEqual(walker_entry.commit.id, m3.id)
- changes = walker_entry.changes()
- self.assertEqual(2, len(changes))
-
- entry_a = (b"x/a", F, blob_a.id)
- entry_a2 = (b"x/a", F, blob_a2.id)
- entry_b = (b"y/b", F, blob_b.id)
- entry_b2 = (b"y/b", F, blob_b2.id)
- self.assertEqual(
- [
- [
- TreeChange(CHANGE_MODIFY, entry_a, entry_a2),
- TreeChange.add(entry_a2),
- ],
- [
- TreeChange.add(entry_b2),
- TreeChange(CHANGE_MODIFY, entry_b, entry_b2),
- ],
- ],
- changes,
- )
-
- def test_filter_changes(self):
- # Construct a commit with 2 files in different subdirectories.
- blob_a = make_object(Blob, data=b"a")
- blob_b = make_object(Blob, data=b"b")
- c1 = self.make_linear_commits(
- 1,
- trees={1: [(b"x/a", blob_a), (b"y/b", blob_b)]},
- )[0]
-
- # Get the WalkEntry for the commit.
- walker = Walker(self.store, c1.id)
- walker_entry = next(iter(walker))
- changes = walker_entry.changes(path_prefix=b"x")
-
- # Compare the changes with the expected values.
- entry_a = (b"a", F, blob_a.id)
- self.assertEqual(
- [TreeChange.add(entry_a)],
- changes,
- )
-
- def test_filter_with_merge(self):
- blob_a = make_object(Blob, data=b"a")
- blob_a2 = make_object(Blob, data=b"a2")
- blob_b = make_object(Blob, data=b"b")
- blob_b2 = make_object(Blob, data=b"b2")
- x1, y2, m3 = self.make_commits(
- [[1], [2], [3, 1, 2]],
- trees={
- 1: [(b"x/a", blob_a)],
- 2: [(b"y/b", blob_b)],
- 3: [(b"x/a", blob_a2), (b"y/b", blob_b2)],
- },
- )
-
- # Get the WalkEntry for the merge commit.
- walker = Walker(self.store, m3.id)
- entries = list(walker)
- walker_entry = entries[0]
- self.assertEqual(walker_entry.commit.id, m3.id)
- changes = walker_entry.changes(b"x")
- self.assertEqual(1, len(changes))
-
- entry_a = (b"a", F, blob_a.id)
- entry_a2 = (b"a", F, blob_a2.id)
- self.assertEqual(
- [[TreeChange(CHANGE_MODIFY, entry_a, entry_a2)]],
- changes,
- )
blob - 223537c9d52d9c5c4b970f0db9431e297691f3b8 (mode 644)
blob + /dev/null
--- dulwich/tests/test_web.py
+++ /dev/null
-# test_web.py -- Tests for the git HTTP server
-# Copyright (C) 2010 Google, Inc.
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Tests for the Git HTTP server."""
-
-import gzip
-import os
-import re
-from io import BytesIO
-from typing import Type
-
-from dulwich.tests import TestCase
-
-from ..object_store import MemoryObjectStore
-from ..objects import Blob
-from ..repo import BaseRepo, MemoryRepo
-from ..server import DictBackend
-from ..web import (
- HTTP_ERROR,
- HTTP_FORBIDDEN,
- HTTP_NOT_FOUND,
- HTTP_OK,
- GunzipFilter,
- HTTPGitApplication,
- HTTPGitRequest,
- _LengthLimitedFile,
- get_idx_file,
- get_info_packs,
- get_info_refs,
- get_loose_object,
- get_pack_file,
- get_text_file,
- handle_service_request,
- send_file,
-)
-from .utils import make_object, make_tag
-
-
-class MinimalistWSGIInputStream:
- """WSGI input stream with no 'seek()' and 'tell()' methods."""
-
- def __init__(self, data) -> None:
- self.data = data
- self.pos = 0
-
- def read(self, howmuch):
- start = self.pos
- end = self.pos + howmuch
- if start >= len(self.data):
- return b""
- self.pos = end
- return self.data[start:end]
-
-
-class MinimalistWSGIInputStream2(MinimalistWSGIInputStream):
- """WSGI input stream with no *working* 'seek()' and 'tell()' methods."""
-
- def seek(self, pos):
- raise NotImplementedError
-
- def tell(self):
- raise NotImplementedError
-
-
-class TestHTTPGitRequest(HTTPGitRequest):
- """HTTPGitRequest with overridden methods to help test caching."""
-
- def __init__(self, *args, **kwargs) -> None:
- HTTPGitRequest.__init__(self, *args, **kwargs)
- self.cached = None
-
- def nocache(self):
- self.cached = False
-
- def cache_forever(self):
- self.cached = True
-
-
-class WebTestCase(TestCase):
- """Base TestCase with useful instance vars and utility functions."""
-
- _req_class: Type[HTTPGitRequest] = TestHTTPGitRequest
-
- def setUp(self):
- super().setUp()
- self._environ = {}
- self._req = self._req_class(
- self._environ, self._start_response, handlers=self._handlers()
- )
- self._status = None
- self._headers = []
- self._output = BytesIO()
-
- def _start_response(self, status, headers):
- self._status = status
- self._headers = list(headers)
- return self._output.write
-
- def _handlers(self):
- return None
-
- def assertContentTypeEquals(self, expected):
- self.assertIn(("Content-Type", expected), self._headers)
-
-
-def _test_backend(objects, refs=None, named_files=None):
- if not refs:
- refs = {}
- if not named_files:
- named_files = {}
- repo = MemoryRepo.init_bare(objects, refs)
- for path, contents in named_files.items():
- repo._put_named_file(path, contents)
- return DictBackend({"/": repo})
-
-
-class DumbHandlersTestCase(WebTestCase):
- def test_send_file_not_found(self):
- list(send_file(self._req, None, "text/plain"))
- self.assertEqual(HTTP_NOT_FOUND, self._status)
-
- def test_send_file(self):
- f = BytesIO(b"foobar")
- output = b"".join(send_file(self._req, f, "some/thing"))
- self.assertEqual(b"foobar", output)
- self.assertEqual(HTTP_OK, self._status)
- self.assertContentTypeEquals("some/thing")
- self.assertTrue(f.closed)
-
- def test_send_file_buffered(self):
- bufsize = 10240
- xs = b"x" * bufsize
- f = BytesIO(2 * xs)
- self.assertEqual([xs, xs], list(send_file(self._req, f, "some/thing")))
- self.assertEqual(HTTP_OK, self._status)
- self.assertContentTypeEquals("some/thing")
- self.assertTrue(f.closed)
-
- def test_send_file_error(self):
- class TestFile:
- def __init__(self, exc_class) -> None:
- self.closed = False
- self._exc_class = exc_class
-
- def read(self, size=-1):
- raise self._exc_class
-
- def close(self):
- self.closed = True
-
- f = TestFile(IOError)
- list(send_file(self._req, f, "some/thing"))
- self.assertEqual(HTTP_ERROR, self._status)
- self.assertTrue(f.closed)
- self.assertFalse(self._req.cached)
-
- # non-IOErrors are reraised
- f = TestFile(AttributeError)
- self.assertRaises(AttributeError, list, send_file(self._req, f, "some/thing"))
- self.assertTrue(f.closed)
- self.assertFalse(self._req.cached)
-
- def test_get_text_file(self):
- backend = _test_backend([], named_files={"description": b"foo"})
- mat = re.search(".*", "description")
- output = b"".join(get_text_file(self._req, backend, mat))
- self.assertEqual(b"foo", output)
- self.assertEqual(HTTP_OK, self._status)
- self.assertContentTypeEquals("text/plain")
- self.assertFalse(self._req.cached)
-
- def test_get_loose_object(self):
- blob = make_object(Blob, data=b"foo")
- backend = _test_backend([blob])
- mat = re.search("^(..)(.{38})$", blob.id.decode("ascii"))
- output = b"".join(get_loose_object(self._req, backend, mat))
- self.assertEqual(blob.as_legacy_object(), output)
- self.assertEqual(HTTP_OK, self._status)
- self.assertContentTypeEquals("application/x-git-loose-object")
- self.assertTrue(self._req.cached)
-
- def test_get_loose_object_missing(self):
- mat = re.search("^(..)(.{38})$", "1" * 40)
- list(get_loose_object(self._req, _test_backend([]), mat))
- self.assertEqual(HTTP_NOT_FOUND, self._status)
-
- def test_get_loose_object_error(self):
- blob = make_object(Blob, data=b"foo")
- backend = _test_backend([blob])
- mat = re.search("^(..)(.{38})$", blob.id.decode("ascii"))
-
- def as_legacy_object_error(self):
- raise OSError
-
- self.addCleanup(setattr, Blob, "as_legacy_object", Blob.as_legacy_object)
- Blob.as_legacy_object = as_legacy_object_error
- list(get_loose_object(self._req, backend, mat))
- self.assertEqual(HTTP_ERROR, self._status)
-
- def test_get_pack_file(self):
- pack_name = os.path.join("objects", "pack", "pack-%s.pack" % ("1" * 40))
- backend = _test_backend([], named_files={pack_name: b"pack contents"})
- mat = re.search(".*", pack_name)
- output = b"".join(get_pack_file(self._req, backend, mat))
- self.assertEqual(b"pack contents", output)
- self.assertEqual(HTTP_OK, self._status)
- self.assertContentTypeEquals("application/x-git-packed-objects")
- self.assertTrue(self._req.cached)
-
- def test_get_idx_file(self):
- idx_name = os.path.join("objects", "pack", "pack-%s.idx" % ("1" * 40))
- backend = _test_backend([], named_files={idx_name: b"idx contents"})
- mat = re.search(".*", idx_name)
- output = b"".join(get_idx_file(self._req, backend, mat))
- self.assertEqual(b"idx contents", output)
- self.assertEqual(HTTP_OK, self._status)
- self.assertContentTypeEquals("application/x-git-packed-objects-toc")
- self.assertTrue(self._req.cached)
-
- def test_get_info_refs(self):
- self._environ["QUERY_STRING"] = ""
-
- blob1 = make_object(Blob, data=b"1")
- blob2 = make_object(Blob, data=b"2")
- blob3 = make_object(Blob, data=b"3")
-
- tag1 = make_tag(blob2, name=b"tag-tag")
-
- objects = [blob1, blob2, blob3, tag1]
- refs = {
- b"HEAD": b"000",
- b"refs/heads/master": blob1.id,
- b"refs/tags/tag-tag": tag1.id,
- b"refs/tags/blob-tag": blob3.id,
- }
- backend = _test_backend(objects, refs=refs)
-
- mat = re.search(".*", "//info/refs")
- self.assertEqual(
- [
- blob1.id + b"\trefs/heads/master\n",
- blob3.id + b"\trefs/tags/blob-tag\n",
- tag1.id + b"\trefs/tags/tag-tag\n",
- blob2.id + b"\trefs/tags/tag-tag^{}\n",
- ],
- list(get_info_refs(self._req, backend, mat)),
- )
- self.assertEqual(HTTP_OK, self._status)
- self.assertContentTypeEquals("text/plain")
- self.assertFalse(self._req.cached)
-
- def test_get_info_refs_not_found(self):
- self._environ["QUERY_STRING"] = ""
-
- objects = []
- refs = {}
- backend = _test_backend(objects, refs=refs)
-
- mat = re.search("info/refs", "/foo/info/refs")
- self.assertEqual(
- [b"No git repository was found at /foo"],
- list(get_info_refs(self._req, backend, mat)),
- )
- self.assertEqual(HTTP_NOT_FOUND, self._status)
- self.assertContentTypeEquals("text/plain")
-
- def test_get_info_packs(self):
- class TestPackData:
- def __init__(self, sha) -> None:
- self.filename = "pack-%s.pack" % sha
-
- class TestPack:
- def __init__(self, sha) -> None:
- self.data = TestPackData(sha)
-
- packs = [TestPack(str(i) * 40) for i in range(1, 4)]
-
- class TestObjectStore(MemoryObjectStore):
- # property must be overridden, can't be assigned
- @property
- def packs(self):
- return packs
-
- store = TestObjectStore()
- repo = BaseRepo(store, None)
- backend = DictBackend({"/": repo})
- mat = re.search(".*", "//info/packs")
- output = b"".join(get_info_packs(self._req, backend, mat))
- expected = b"".join(
- [(b"P pack-" + s + b".pack\n") for s in [b"1" * 40, b"2" * 40, b"3" * 40]]
- )
- self.assertEqual(expected, output)
- self.assertEqual(HTTP_OK, self._status)
- self.assertContentTypeEquals("text/plain")
- self.assertFalse(self._req.cached)
-
-
-class SmartHandlersTestCase(WebTestCase):
- class _TestUploadPackHandler:
- def __init__(
- self,
- backend,
- args,
- proto,
- stateless_rpc=None,
- advertise_refs=False,
- ) -> None:
- self.args = args
- self.proto = proto
- self.stateless_rpc = stateless_rpc
- self.advertise_refs = advertise_refs
-
- def handle(self):
- self.proto.write(b"handled input: " + self.proto.recv(1024))
-
- def _make_handler(self, *args, **kwargs):
- self._handler = self._TestUploadPackHandler(*args, **kwargs)
- return self._handler
-
- def _handlers(self):
- return {b"git-upload-pack": self._make_handler}
-
- def test_handle_service_request_unknown(self):
- mat = re.search(".*", "/git-evil-handler")
- content = list(handle_service_request(self._req, "backend", mat))
- self.assertEqual(HTTP_FORBIDDEN, self._status)
- self.assertNotIn(b"git-evil-handler", b"".join(content))
- self.assertFalse(self._req.cached)
-
- def _run_handle_service_request(self, content_length=None):
- self._environ["wsgi.input"] = BytesIO(b"foo")
- if content_length is not None:
- self._environ["CONTENT_LENGTH"] = content_length
- mat = re.search(".*", "/git-upload-pack")
-
- class Backend:
- def open_repository(self, path):
- return None
-
- handler_output = b"".join(handle_service_request(self._req, Backend(), mat))
- write_output = self._output.getvalue()
- # Ensure all output was written via the write callback.
- self.assertEqual(b"", handler_output)
- self.assertEqual(b"handled input: foo", write_output)
- self.assertContentTypeEquals("application/x-git-upload-pack-result")
- self.assertFalse(self._handler.advertise_refs)
- self.assertTrue(self._handler.stateless_rpc)
- self.assertFalse(self._req.cached)
-
- def test_handle_service_request(self):
- self._run_handle_service_request()
-
- def test_handle_service_request_with_length(self):
- self._run_handle_service_request(content_length="3")
-
- def test_handle_service_request_empty_length(self):
- self._run_handle_service_request(content_length="")
-
- def test_get_info_refs_unknown(self):
- self._environ["QUERY_STRING"] = "service=git-evil-handler"
-
- class Backend:
- def open_repository(self, url):
- return None
-
- mat = re.search(".*", "/git-evil-pack")
- content = list(get_info_refs(self._req, Backend(), mat))
- self.assertNotIn(b"git-evil-handler", b"".join(content))
- self.assertEqual(HTTP_FORBIDDEN, self._status)
- self.assertFalse(self._req.cached)
-
- def test_get_info_refs(self):
- self._environ["wsgi.input"] = BytesIO(b"foo")
- self._environ["QUERY_STRING"] = "service=git-upload-pack"
-
- class Backend:
- def open_repository(self, url):
- return None
-
- mat = re.search(".*", "/git-upload-pack")
- handler_output = b"".join(get_info_refs(self._req, Backend(), mat))
- write_output = self._output.getvalue()
- self.assertEqual(
- (
- b"001e# service=git-upload-pack\n"
- b"0000"
- # input is ignored by the handler
- b"handled input: "
- ),
- write_output,
- )
- # Ensure all output was written via the write callback.
- self.assertEqual(b"", handler_output)
- self.assertTrue(self._handler.advertise_refs)
- self.assertTrue(self._handler.stateless_rpc)
- self.assertFalse(self._req.cached)
-
-
-class LengthLimitedFileTestCase(TestCase):
- def test_no_cutoff(self):
- f = _LengthLimitedFile(BytesIO(b"foobar"), 1024)
- self.assertEqual(b"foobar", f.read())
-
- def test_cutoff(self):
- f = _LengthLimitedFile(BytesIO(b"foobar"), 3)
- self.assertEqual(b"foo", f.read())
- self.assertEqual(b"", f.read())
-
- def test_multiple_reads(self):
- f = _LengthLimitedFile(BytesIO(b"foobar"), 3)
- self.assertEqual(b"fo", f.read(2))
- self.assertEqual(b"o", f.read(2))
- self.assertEqual(b"", f.read())
-
-
-class HTTPGitRequestTestCase(WebTestCase):
- # This class tests the contents of the actual cache headers
- _req_class = HTTPGitRequest
-
- def test_not_found(self):
- self._req.cache_forever() # cache headers should be discarded
- message = "Something not found"
- self.assertEqual(message.encode("ascii"), self._req.not_found(message))
- self.assertEqual(HTTP_NOT_FOUND, self._status)
- self.assertEqual({("Content-Type", "text/plain")}, set(self._headers))
-
- def test_forbidden(self):
- self._req.cache_forever() # cache headers should be discarded
- message = "Something not found"
- self.assertEqual(message.encode("ascii"), self._req.forbidden(message))
- self.assertEqual(HTTP_FORBIDDEN, self._status)
- self.assertEqual({("Content-Type", "text/plain")}, set(self._headers))
-
- def test_respond_ok(self):
- self._req.respond()
- self.assertEqual([], self._headers)
- self.assertEqual(HTTP_OK, self._status)
-
- def test_respond(self):
- self._req.nocache()
- self._req.respond(
- status=402,
- content_type="some/type",
- headers=[("X-Foo", "foo"), ("X-Bar", "bar")],
- )
- self.assertEqual(
- {
- ("X-Foo", "foo"),
- ("X-Bar", "bar"),
- ("Content-Type", "some/type"),
- ("Expires", "Fri, 01 Jan 1980 00:00:00 GMT"),
- ("Pragma", "no-cache"),
- ("Cache-Control", "no-cache, max-age=0, must-revalidate"),
- },
- set(self._headers),
- )
- self.assertEqual(402, self._status)
-
-
-class HTTPGitApplicationTestCase(TestCase):
- def setUp(self):
- super().setUp()
- self._app = HTTPGitApplication("backend")
-
- self._environ = {
- "PATH_INFO": "/foo",
- "REQUEST_METHOD": "GET",
- }
-
- def _test_handler(self, req, backend, mat):
- # tests interface used by all handlers
- self.assertEqual(self._environ, req.environ)
- self.assertEqual("backend", backend)
- self.assertEqual("/foo", mat.group(0))
- return "output"
-
- def _add_handler(self, app):
- req = self._environ["REQUEST_METHOD"]
- app.services = {
- (req, re.compile("/foo$")): self._test_handler,
- }
-
- def test_call(self):
- self._add_handler(self._app)
- self.assertEqual("output", self._app(self._environ, None))
-
- def test_fallback_app(self):
- def test_app(environ, start_response):
- return "output"
-
- app = HTTPGitApplication("backend", fallback_app=test_app)
- self.assertEqual("output", app(self._environ, None))
-
-
-class GunzipTestCase(HTTPGitApplicationTestCase):
- __doc__ = """TestCase for testing the GunzipFilter, ensuring the wsgi.input
- is correctly decompressed and headers are corrected.
- """
- example_text = __doc__.encode("ascii")
-
- def setUp(self):
- super().setUp()
- self._app = GunzipFilter(self._app)
- self._environ["HTTP_CONTENT_ENCODING"] = "gzip"
- self._environ["REQUEST_METHOD"] = "POST"
-
- def _get_zstream(self, text):
- zstream = BytesIO()
- zfile = gzip.GzipFile(fileobj=zstream, mode="wb")
- zfile.write(text)
- zfile.close()
- zlength = zstream.tell()
- zstream.seek(0)
- return zstream, zlength
-
- def _test_call(self, orig, zstream, zlength):
- self._add_handler(self._app.app)
- self.assertLess(zlength, len(orig))
- self.assertEqual(self._environ["HTTP_CONTENT_ENCODING"], "gzip")
- self._environ["CONTENT_LENGTH"] = zlength
- self._environ["wsgi.input"] = zstream
- self._app(self._environ, None)
- buf = self._environ["wsgi.input"]
- self.assertIsNot(buf, zstream)
- buf.seek(0)
- self.assertEqual(orig, buf.read())
- self.assertIs(None, self._environ.get("CONTENT_LENGTH"))
- self.assertNotIn("HTTP_CONTENT_ENCODING", self._environ)
-
- def test_call(self):
- self._test_call(self.example_text, *self._get_zstream(self.example_text))
-
- def test_call_no_seek(self):
- """This ensures that the gunzipping code doesn't require any methods on
- 'wsgi.input' except for '.read()'. (In particular, it shouldn't
- require '.seek()'. See https://github.com/jelmer/dulwich/issues/140.).
- """
- zstream, zlength = self._get_zstream(self.example_text)
- self._test_call(
- self.example_text,
- MinimalistWSGIInputStream(zstream.read()),
- zlength,
- )
-
- def test_call_no_working_seek(self):
- """Similar to 'test_call_no_seek', but this time the methods are available
- (but defunct). See https://github.com/jonashaag/klaus/issues/154.
- """
- zstream, zlength = self._get_zstream(self.example_text)
- self._test_call(
- self.example_text,
- MinimalistWSGIInputStream2(zstream.read()),
- zlength,
- )
blob - 22f5286456b0eba56dffc49c24c0103353fa1562 (mode 644)
blob + /dev/null
--- dulwich/tests/utils.py
+++ /dev/null
-# utils.py -- Test utilities for Dulwich.
-# Copyright (C) 2010 Google, Inc.
-#
-# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
-# General Public License as public by the Free Software Foundation; version 2.0
-# or (at your option) any later version. You can redistribute it and/or
-# modify it under the terms of either of these two licenses.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# You should have received a copy of the licenses; if not, see
-# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
-# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
-# License, Version 2.0.
-#
-
-"""Utility functions common to Dulwich tests."""
-
-import datetime
-import os
-import shutil
-import tempfile
-import time
-import types
-import warnings
-
-from dulwich.tests import SkipTest
-
-from ..index import commit_tree
-from ..objects import Commit, FixedSha, Tag, object_class
-from ..pack import (
- DELTA_TYPES,
- OFS_DELTA,
- REF_DELTA,
- SHA1Writer,
- create_delta,
- obj_sha,
- write_pack_header,
- write_pack_object,
-)
-from ..repo import Repo
-
-# Plain files are very frequently used in tests, so let the mode be very short.
-F = 0o100644 # Shorthand mode for Files.
-
-
-def open_repo(name, temp_dir=None):
- """Open a copy of a repo in a temporary directory.
-
- Use this function for accessing repos in dulwich/tests/data/repos to avoid
- accidentally or intentionally modifying those repos in place. Use
- tear_down_repo to delete any temp files created.
-
- Args:
- name: The name of the repository, relative to
- dulwich/tests/data/repos
- temp_dir: temporary directory to initialize to. If not provided, a
- temporary directory will be created.
- Returns: An initialized Repo object that lives in a temporary directory.
- """
- if temp_dir is None:
- temp_dir = tempfile.mkdtemp()
- repo_dir = os.path.join(
- os.path.dirname(__file__), "..", "..", "testdata", "repos", name
- )
- temp_repo_dir = os.path.join(temp_dir, name)
- shutil.copytree(repo_dir, temp_repo_dir, symlinks=True)
- return Repo(temp_repo_dir)
-
-
-def tear_down_repo(repo):
- """Tear down a test repository."""
- repo.close()
- temp_dir = os.path.dirname(repo.path.rstrip(os.sep))
- shutil.rmtree(temp_dir)
-
-
-def make_object(cls, **attrs):
- """Make an object for testing and assign some members.
-
- This method creates a new subclass to allow arbitrary attribute
- reassignment, which is not otherwise possible with objects having
- __slots__.
-
- Args:
- attrs: dict of attributes to set on the new object.
- Returns: A newly initialized object of type cls.
- """
-
- class TestObject(cls):
- """Class that inherits from the given class, but without __slots__.
-
- Note that classes with __slots__ can't have arbitrary attributes
- monkey-patched in, so this is a class that is exactly the same only
- with a __dict__ instead of __slots__.
- """
-
- TestObject.__name__ = "TestObject_" + cls.__name__
-
- obj = TestObject()
- for name, value in attrs.items():
- if name == "id":
- # id property is read-only, so we overwrite sha instead.
- sha = FixedSha(value)
- obj.sha = lambda: sha
- else:
- setattr(obj, name, value)
- return obj
-
-
-def make_commit(**attrs):
- """Make a Commit object with a default set of members.
-
- Args:
- attrs: dict of attributes to overwrite from the default values.
- Returns: A newly initialized Commit object.
- """
- default_time = 1262304000 # 2010-01-01 00:00:00
- all_attrs = {
- "author": b"Test Author <test@nodomain.com>",
- "author_time": default_time,
- "author_timezone": 0,
- "committer": b"Test Committer <test@nodomain.com>",
- "commit_time": default_time,
- "commit_timezone": 0,
- "message": b"Test message.",
- "parents": [],
- "tree": b"0" * 40,
- }
- all_attrs.update(attrs)
- return make_object(Commit, **all_attrs)
-
-
-def make_tag(target, **attrs):
- """Make a Tag object with a default set of values.
-
- Args:
- target: object to be tagged (Commit, Blob, Tree, etc)
- attrs: dict of attributes to overwrite from the default values.
- Returns: A newly initialized Tag object.
- """
- target_id = target.id
- target_type = object_class(target.type_name)
- default_time = int(time.mktime(datetime.datetime(2010, 1, 1).timetuple()))
- all_attrs = {
- "tagger": b"Test Author <test@nodomain.com>",
- "tag_time": default_time,
- "tag_timezone": 0,
- "message": b"Test message.",
- "object": (target_type, target_id),
- "name": b"Test Tag",
- }
- all_attrs.update(attrs)
- return make_object(Tag, **all_attrs)
-
-
-def functest_builder(method, func):
- """Generate a test method that tests the given function."""
-
- def do_test(self):
- method(self, func)
-
- return do_test
-
-
-def ext_functest_builder(method, func):
- """Generate a test method that tests the given extension function.
-
- This is intended to generate test methods that test both a pure-Python
- version and an extension version using common test code. The extension test
- will raise SkipTest if the extension is not found.
-
- Sample usage:
-
- class MyTest(TestCase);
- def _do_some_test(self, func_impl):
- self.assertEqual('foo', func_impl())
-
- test_foo = functest_builder(_do_some_test, foo_py)
- test_foo_extension = ext_functest_builder(_do_some_test, _foo_c)
-
- Args:
- method: The method to run. It must must two parameters, self and the
- function implementation to test.
- func: The function implementation to pass to method.
- """
-
- def do_test(self):
- if not isinstance(func, types.BuiltinFunctionType):
- raise SkipTest("%s extension not found" % func)
- method(self, func)
-
- return do_test
-
-
-def build_pack(f, objects_spec, store=None):
- """Write test pack data from a concise spec.
-
- Args:
- f: A file-like object to write the pack to.
- objects_spec: A list of (type_num, obj). For non-delta types, obj
- is the string of that object's data.
- For delta types, obj is a tuple of (base, data), where:
-
- * base can be either an index in objects_spec of the base for that
- * delta; or for a ref delta, a SHA, in which case the resulting pack
- * will be thin and the base will be an external ref.
- * data is a string of the full, non-deltified data for that object.
-
- Note that offsets/refs and deltas are computed within this function.
- store: An optional ObjectStore for looking up external refs.
- Returns: A list of tuples in the order specified by objects_spec:
- (offset, type num, data, sha, CRC32)
- """
- sf = SHA1Writer(f)
- num_objects = len(objects_spec)
- write_pack_header(sf.write, num_objects)
-
- full_objects = {}
- offsets = {}
- crc32s = {}
-
- while len(full_objects) < num_objects:
- for i, (type_num, data) in enumerate(objects_spec):
- if type_num not in DELTA_TYPES:
- full_objects[i] = (type_num, data, obj_sha(type_num, [data]))
- continue
- base, data = data
- if isinstance(base, int):
- if base not in full_objects:
- continue
- base_type_num, _, _ = full_objects[base]
- else:
- base_type_num, _ = store.get_raw(base)
- full_objects[i] = (
- base_type_num,
- data,
- obj_sha(base_type_num, [data]),
- )
-
- for i, (type_num, obj) in enumerate(objects_spec):
- offset = f.tell()
- if type_num == OFS_DELTA:
- base_index, data = obj
- base = offset - offsets[base_index]
- _, base_data, _ = full_objects[base_index]
- obj = (base, list(create_delta(base_data, data)))
- elif type_num == REF_DELTA:
- base_ref, data = obj
- if isinstance(base_ref, int):
- _, base_data, base = full_objects[base_ref]
- else:
- base_type_num, base_data = store.get_raw(base_ref)
- base = obj_sha(base_type_num, base_data)
- obj = (base, list(create_delta(base_data, data)))
-
- crc32 = write_pack_object(sf.write, type_num, obj)
- offsets[i] = offset
- crc32s[i] = crc32
-
- expected = []
- for i in range(num_objects):
- type_num, data, sha = full_objects[i]
- assert len(sha) == 20
- expected.append((offsets[i], type_num, data, sha, crc32s[i]))
-
- sf.write_sha()
- f.seek(0)
- return expected
-
-
-def build_commit_graph(object_store, commit_spec, trees=None, attrs=None):
- """Build a commit graph from a concise specification.
-
- Sample usage:
- >>> c1, c2, c3 = build_commit_graph(store, [[1], [2, 1], [3, 1, 2]])
- >>> store[store[c3].parents[0]] == c1
- True
- >>> store[store[c3].parents[1]] == c2
- True
-
- If not otherwise specified, commits will refer to the empty tree and have
- commit times increasing in the same order as the commit spec.
-
- Args:
- object_store: An ObjectStore to commit objects to.
- commit_spec: An iterable of iterables of ints defining the commit
- graph. Each entry defines one commit, and entries must be in
- topological order. The first element of each entry is a commit number,
- and the remaining elements are its parents. The commit numbers are only
- meaningful for the call to make_commits; since real commit objects are
- created, they will get created with real, opaque SHAs.
- trees: An optional dict of commit number -> tree spec for building
- trees for commits. The tree spec is an iterable of (path, blob, mode)
- or (path, blob) entries; if mode is omitted, it defaults to the normal
- file mode (0100644).
- attrs: A dict of commit number -> (dict of attribute -> value) for
- assigning additional values to the commits.
- Returns: The list of commit objects created.
-
- Raises:
- ValueError: If an undefined commit identifier is listed as a parent.
- """
- if trees is None:
- trees = {}
- if attrs is None:
- attrs = {}
- commit_time = 0
- nums = {}
- commits = []
-
- for commit in commit_spec:
- commit_num = commit[0]
- try:
- parent_ids = [nums[pn] for pn in commit[1:]]
- except KeyError as exc:
- (missing_parent,) = exc.args
- raise ValueError("Unknown parent %i" % missing_parent) from exc
-
- blobs = []
- for entry in trees.get(commit_num, []):
- if len(entry) == 2:
- path, blob = entry
- entry = (path, blob, F)
- path, blob, mode = entry
- blobs.append((path, blob.id, mode))
- object_store.add_object(blob)
- tree_id = commit_tree(object_store, blobs)
-
- commit_attrs = {
- "message": ("Commit %i" % commit_num).encode("ascii"),
- "parents": parent_ids,
- "tree": tree_id,
- "commit_time": commit_time,
- }
- commit_attrs.update(attrs.get(commit_num, {}))
- commit_obj = make_commit(**commit_attrs)
-
- # By default, increment the time by a lot. Out-of-order commits should
- # be closer together than this because their main cause is clock skew.
- commit_time = commit_attrs["commit_time"] + 100
- nums[commit_num] = commit_obj.id
- object_store.add_object(commit_obj)
- commits.append(commit_obj)
-
- return commits
-
-
-def setup_warning_catcher():
- """Wrap warnings.showwarning with code that records warnings."""
- caught_warnings = []
- original_showwarning = warnings.showwarning
-
- def custom_showwarning(*args, **kwargs):
- caught_warnings.append(args[0])
-
- warnings.showwarning = custom_showwarning
-
- def restore_showwarning():
- warnings.showwarning = original_showwarning
-
- return caught_warnings, restore_showwarning
blob - 6592b8f7b02b4cb3697fec52d1aac59bc7cbc15b
blob + f0fe520e9f172a33fe2da14ab395bb250ff5ea90
--- pyproject.toml
+++ pyproject.toml
packages = [
"dulwich",
"dulwich.cloud",
- "dulwich.tests",
- "dulwich.tests.compat",
"dulwich.contrib",
]
include-package-data = true
blob - /dev/null
blob + 52d587ddeb41fe1e24e26e2c62c88ae323f52f33 (mode 644)
--- /dev/null
+++ tests/__init__.py
+# __init__.py -- The tests for dulwich
+# Copyright (C) 2007 James Westby <jw+debian@jameswestby.net>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for Dulwich."""
+
+__all__ = [
+ "SkipTest",
+ "TestCase",
+ "BlackboxTestCase",
+ "skipIf",
+ "expectedFailure",
+]
+
+import doctest
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+# If Python itself provides an exception, use that
+import unittest
+from typing import ClassVar, List
+from unittest import SkipTest, expectedFailure, skipIf
+from unittest import TestCase as _TestCase
+
+
+class TestCase(_TestCase):
+ def setUp(self):
+ super().setUp()
+ self.overrideEnv("HOME", "/nonexistent")
+ self.overrideEnv("GIT_CONFIG_NOSYSTEM", "1")
+
+ def overrideEnv(self, name, value):
+ def restore():
+ if oldval is not None:
+ os.environ[name] = oldval
+ else:
+ del os.environ[name]
+
+ oldval = os.environ.get(name)
+ if value is not None:
+ os.environ[name] = value
+ else:
+ del os.environ[name]
+ self.addCleanup(restore)
+
+
+class BlackboxTestCase(TestCase):
+ """Blackbox testing."""
+
+ # TODO(jelmer): Include more possible binary paths.
+ bin_directories: ClassVar[List[str]] = [
+ os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "bin")),
+ "/usr/bin",
+ "/usr/local/bin",
+ ]
+
+ def bin_path(self, name):
+ """Determine the full path of a binary.
+
+ Args:
+ name: Name of the script
+ Returns: Full path
+ """
+ for d in self.bin_directories:
+ p = os.path.join(d, name)
+ if os.path.isfile(p):
+ return p
+ else:
+ raise SkipTest("Unable to find binary %s" % name)
+
+ def run_command(self, name, args):
+ """Run a Dulwich command.
+
+ Args:
+ name: Name of the command, as it exists in bin/
+ args: Arguments to the command
+ """
+ env = dict(os.environ)
+ env["PYTHONPATH"] = os.pathsep.join(sys.path)
+
+ # Since they don't have any extensions, Windows can't recognize
+ # executability of the Python files in /bin. Even then, we'd have to
+ # expect the user to set up file associations for .py files.
+ #
+ # Save us from all that headache and call python with the bin script.
+ argv = [sys.executable, self.bin_path(name), *args]
+ return subprocess.Popen(
+ argv,
+ stdout=subprocess.PIPE,
+ stdin=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ env=env,
+ )
+
+
+def self_test_suite():
+ names = [
+ "archive",
+ "blackbox",
+ "bundle",
+ "client",
+ "config",
+ "credentials",
+ "diff_tree",
+ "fastexport",
+ "file",
+ "grafts",
+ "graph",
+ "greenthreads",
+ "hooks",
+ "ignore",
+ "index",
+ "lfs",
+ "line_ending",
+ "lru_cache",
+ "mailmap",
+ "objects",
+ "objectspec",
+ "object_store",
+ "missing_obj_finder",
+ "pack",
+ "patch",
+ "porcelain",
+ "protocol",
+ "reflog",
+ "refs",
+ "repository",
+ "server",
+ "stash",
+ "utils",
+ "walk",
+ "web",
+ ]
+ module_names = ["tests.test_" + name for name in names]
+ loader = unittest.TestLoader()
+ return loader.loadTestsFromNames(module_names)
+
+
+def tutorial_test_suite():
+ tutorial = [
+ "introduction",
+ "file-format",
+ "repo",
+ "object-store",
+ "remote",
+ "conclusion",
+ ]
+ tutorial_files = [f"../docs/tutorial/{name}.txt" for name in tutorial]
+
+ to_restore = []
+
+ def overrideEnv(name, value):
+ oldval = os.environ.get(name)
+ if value is not None:
+ os.environ[name] = value
+ else:
+ del os.environ[name]
+ to_restore.append((name, oldval))
+
+ def setup(test):
+ test.__old_cwd = os.getcwd()
+ test.tempdir = tempfile.mkdtemp()
+ test.globs.update({"tempdir": test.tempdir})
+ os.chdir(test.tempdir)
+ overrideEnv("HOME", "/nonexistent")
+ overrideEnv("GIT_CONFIG_NOSYSTEM", "1")
+
+ def teardown(test):
+ os.chdir(test.__old_cwd)
+ shutil.rmtree(test.tempdir)
+ for name, oldval in to_restore:
+ if oldval is not None:
+ os.environ[name] = oldval
+ else:
+ del os.environ[name]
+ to_restore.clear()
+
+ return doctest.DocFileSuite(
+ module_relative=True,
+ package="tests",
+ setUp=setup,
+ tearDown=teardown,
+ *tutorial_files,
+ )
+
+
+def nocompat_test_suite():
+ result = unittest.TestSuite()
+ result.addTests(self_test_suite())
+ result.addTests(tutorial_test_suite())
+ from dulwich.contrib import test_suite as contrib_test_suite
+
+ result.addTests(contrib_test_suite())
+ return result
+
+
+def compat_test_suite():
+ result = unittest.TestSuite()
+ from .compat import test_suite as compat_test_suite
+
+ result.addTests(compat_test_suite())
+ return result
+
+
+def test_suite():
+ result = unittest.TestSuite()
+ result.addTests(self_test_suite())
+ if sys.platform != "win32":
+ result.addTests(tutorial_test_suite())
+ from .compat import test_suite as compat_test_suite
+
+ result.addTests(compat_test_suite())
+ from .contrib import test_suite as contrib_test_suite
+
+ result.addTests(contrib_test_suite())
+ return result
blob - /dev/null
blob + 588937d5be873819ce6ec7a9844750fc6b02eeac (mode 644)
--- /dev/null
+++ tests/compat/__init__.py
+# __init__.py -- Compatibility tests for dulwich
+# Copyright (C) 2010 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Compatibility tests for Dulwich."""
+
+import unittest
+
+
+def test_suite():
+ names = [
+ "client",
+ "pack",
+ "patch",
+ "porcelain",
+ "repository",
+ "server",
+ "utils",
+ "web",
+ ]
+ module_names = ["tests.compat.test_" + name for name in names]
+ result = unittest.TestSuite()
+ loader = unittest.TestLoader()
+ suite = loader.loadTestsFromNames(module_names)
+ result.addTests(suite)
+ return result
blob - /dev/null
blob + cd06126c084b381fb590de441314ee90ec4626ba (mode 644)
--- /dev/null
+++ tests/compat/server_utils.py
+# server_utils.py -- Git server compatibility utilities
+# Copyright (C) 2010 Google, Inc.
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Utilities for testing git server compatibility."""
+
+import errno
+import os
+import shutil
+import socket
+import tempfile
+
+from dulwich.objects import hex_to_sha
+from dulwich.protocol import CAPABILITY_SIDE_BAND_64K
+from dulwich.repo import Repo
+from dulwich.server import ReceivePackHandler
+
+from ..utils import tear_down_repo
+from .utils import require_git_version, run_git_or_fail
+
+
+class _StubRepo:
+ """A stub repo that just contains a path to tear down."""
+
+ def __init__(self, name) -> None:
+ temp_dir = tempfile.mkdtemp()
+ self.path = os.path.join(temp_dir, name)
+ os.mkdir(self.path)
+
+ def close(self):
+ pass
+
+
+def _get_shallow(repo):
+ shallow_file = repo.get_named_file("shallow")
+ if not shallow_file:
+ return []
+ shallows = []
+ with shallow_file:
+ for line in shallow_file:
+ sha = line.strip()
+ if not sha:
+ continue
+ hex_to_sha(sha)
+ shallows.append(sha)
+ return shallows
+
+
class ServerTests:
    """Base tests for testing servers.

    Does not inherit from TestCase so tests are not automatically run.

    Concrete subclasses are expected to supply ``protocol``,
    ``import_repo``, ``_start_server``, ``addCleanup`` and the
    ``assertRepos*``/``assertObjectStoreEqual`` helpers -- TODO confirm
    against the mixin TestCase classes.
    """

    # Minimum git version that supports --no-single-branch on clone,
    # required by the shallow-clone tests below.
    min_single_branch_version = (
        1,
        7,
        10,
    )

    def import_repos(self):
        # "new" contains extra history on top of "old"; the push/fetch
        # tests below move one toward the other.
        self._old_repo = self.import_repo("server_old.export")
        self._new_repo = self.import_repo("server_new.export")

    def url(self, port):
        # ``protocol`` (e.g. "git" or "http") comes from the subclass.
        return f"{self.protocol}://localhost:{port}/"

    def branch_args(self, branches=None):
        # Build "src:dst" refspecs for the given branch names.
        if branches is None:
            branches = ["master", "branch"]
        return [f"{b}:{b}" for b in branches]

    def test_push_to_dulwich(self):
        """git push into the served (older) repo brings it up to date."""
        self.import_repos()
        self.assertReposNotEqual(self._old_repo, self._new_repo)
        port = self._start_server(self._old_repo)

        run_git_or_fail(
            ["push", self.url(port), *self.branch_args()],
            cwd=self._new_repo.path,
        )
        self.assertReposEqual(self._old_repo, self._new_repo)

    def test_push_to_dulwich_no_op(self):
        """Pushing identical refs succeeds and changes nothing."""
        self._old_repo = self.import_repo("server_old.export")
        self._new_repo = self.import_repo("server_old.export")
        self.assertReposEqual(self._old_repo, self._new_repo)
        port = self._start_server(self._old_repo)

        run_git_or_fail(
            ["push", self.url(port), *self.branch_args()],
            cwd=self._new_repo.path,
        )
        self.assertReposEqual(self._old_repo, self._new_repo)

    def test_push_to_dulwich_remove_branch(self):
        """Pushing the ":master" refspec deletes master on the server."""
        self._old_repo = self.import_repo("server_old.export")
        self._new_repo = self.import_repo("server_old.export")
        self.assertReposEqual(self._old_repo, self._new_repo)
        port = self._start_server(self._old_repo)

        run_git_or_fail(["push", self.url(port), ":master"], cwd=self._new_repo.path)

        self.assertEqual(list(self._old_repo.get_refs().keys()), [b"refs/heads/branch"])

    def test_fetch_from_dulwich(self):
        """git fetch from the served (newer) repo updates the old one."""
        self.import_repos()
        self.assertReposNotEqual(self._old_repo, self._new_repo)
        port = self._start_server(self._new_repo)

        run_git_or_fail(
            ["fetch", self.url(port), *self.branch_args()],
            cwd=self._old_repo.path,
        )
        # flush the pack cache so any new packs are picked up
        self._old_repo.object_store._pack_cache_time = 0
        self.assertReposEqual(self._old_repo, self._new_repo)

    def test_fetch_from_dulwich_no_op(self):
        """Fetching identical refs succeeds and changes nothing."""
        self._old_repo = self.import_repo("server_old.export")
        self._new_repo = self.import_repo("server_old.export")
        self.assertReposEqual(self._old_repo, self._new_repo)
        port = self._start_server(self._new_repo)

        run_git_or_fail(
            ["fetch", self.url(port), *self.branch_args()],
            cwd=self._old_repo.path,
        )
        # flush the pack cache so any new packs are picked up
        self._old_repo.object_store._pack_cache_time = 0
        self.assertReposEqual(self._old_repo, self._new_repo)

    def test_clone_from_dulwich_empty(self):
        """Cloning an empty served repo yields an equal (empty) clone."""
        old_repo_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, old_repo_dir)
        self._old_repo = Repo.init_bare(old_repo_dir)
        port = self._start_server(self._old_repo)

        new_repo_base_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, new_repo_base_dir)
        new_repo_dir = os.path.join(new_repo_base_dir, "empty_new")
        run_git_or_fail(["clone", self.url(port), new_repo_dir], cwd=new_repo_base_dir)
        new_repo = Repo(new_repo_dir)
        self.assertReposEqual(self._old_repo, new_repo)

    def test_lsremote_from_dulwich(self):
        """ls-remote lists HEAD plus the two branch refs (3 lines + EOL)."""
        self._repo = self.import_repo("server_old.export")
        port = self._start_server(self._repo)
        o = run_git_or_fail(["ls-remote", self.url(port)])
        self.assertEqual(len(o.split(b"\n")), 4)

    def test_new_shallow_clone_from_dulwich(self):
        """A depth-1 clone records the expected shallow boundary commits."""
        require_git_version(self.min_single_branch_version)
        self._source_repo = self.import_repo("server_new.export")
        self._stub_repo = _StubRepo("shallow")
        self.addCleanup(tear_down_repo, self._stub_repo)
        port = self._start_server(self._source_repo)

        # Fetch at depth 1
        run_git_or_fail(
            [
                "clone",
                "--mirror",
                "--depth=1",
                "--no-single-branch",
                self.url(port),
                self._stub_repo.path,
            ]
        )
        clone = self._stub_repo = Repo(self._stub_repo.path)
        expected_shallow = [
            b"35e0b59e187dd72a0af294aedffc213eaa4d03ff",
            b"514dc6d3fbfe77361bcaef320c4d21b72bc10be9",
        ]
        self.assertEqual(expected_shallow, _get_shallow(clone))
        self.assertReposNotEqual(clone, self._source_repo)

    def test_shallow_clone_from_git_is_identical(self):
        """A shallow clone via dulwich matches one made by stock git."""
        require_git_version(self.min_single_branch_version)
        self._source_repo = self.import_repo("server_new.export")
        self._stub_repo_git = _StubRepo("shallow-git")
        self.addCleanup(tear_down_repo, self._stub_repo_git)
        self._stub_repo_dw = _StubRepo("shallow-dw")
        self.addCleanup(tear_down_repo, self._stub_repo_dw)

        # shallow clone using stock git, then using dulwich
        run_git_or_fail(
            [
                "clone",
                "--mirror",
                "--depth=1",
                "--no-single-branch",
                "file://" + self._source_repo.path,
                self._stub_repo_git.path,
            ]
        )

        port = self._start_server(self._source_repo)
        run_git_or_fail(
            [
                "clone",
                "--mirror",
                "--depth=1",
                "--no-single-branch",
                self.url(port),
                self._stub_repo_dw.path,
            ]
        )

        # compare the two clones; they should be equal
        self.assertReposEqual(
            Repo(self._stub_repo_git.path), Repo(self._stub_repo_dw.path)
        )

    def test_fetch_same_depth_into_shallow_clone_from_dulwich(self):
        """Re-fetching at the same depth leaves the shallow set unchanged."""
        require_git_version(self.min_single_branch_version)
        self._source_repo = self.import_repo("server_new.export")
        self._stub_repo = _StubRepo("shallow")
        self.addCleanup(tear_down_repo, self._stub_repo)
        port = self._start_server(self._source_repo)

        # Fetch at depth 2
        run_git_or_fail(
            [
                "clone",
                "--mirror",
                "--depth=2",
                "--no-single-branch",
                self.url(port),
                self._stub_repo.path,
            ]
        )
        clone = self._stub_repo = Repo(self._stub_repo.path)

        # Fetching at the same depth is a no-op.
        run_git_or_fail(
            ["fetch", "--depth=2", self.url(port), *self.branch_args()],
            cwd=self._stub_repo.path,
        )
        expected_shallow = [
            b"94de09a530df27ac3bb613aaecdd539e0a0655e1",
            b"da5cd81e1883c62a25bb37c4d1f8ad965b29bf8d",
        ]
        self.assertEqual(expected_shallow, _get_shallow(clone))
        self.assertReposNotEqual(clone, self._source_repo)

    def test_fetch_full_depth_into_shallow_clone_from_dulwich(self):
        """Deepening to the full history unshallows the clone entirely."""
        require_git_version(self.min_single_branch_version)
        self._source_repo = self.import_repo("server_new.export")
        self._stub_repo = _StubRepo("shallow")
        self.addCleanup(tear_down_repo, self._stub_repo)
        port = self._start_server(self._source_repo)

        # Fetch at depth 2
        run_git_or_fail(
            [
                "clone",
                "--mirror",
                "--depth=2",
                "--no-single-branch",
                self.url(port),
                self._stub_repo.path,
            ]
        )
        clone = self._stub_repo = Repo(self._stub_repo.path)

        # Fetching at the same depth is a no-op.
        run_git_or_fail(
            ["fetch", "--depth=2", self.url(port), *self.branch_args()],
            cwd=self._stub_repo.path,
        )

        # The whole repo only has depth 4, so it should equal server_new.
        run_git_or_fail(
            ["fetch", "--depth=4", self.url(port), *self.branch_args()],
            cwd=self._stub_repo.path,
        )
        self.assertEqual([], _get_shallow(clone))
        self.assertReposEqual(clone, self._source_repo)

    def test_fetch_from_dulwich_issue_88_standard(self):
        # Basically an integration test to see that the ACK/NAK
        # generation works on repos with common head.
        self._source_repo = self.import_repo("issue88_expect_ack_nak_server.export")
        self._client_repo = self.import_repo("issue88_expect_ack_nak_client.export")
        port = self._start_server(self._source_repo)

        run_git_or_fail(["fetch", self.url(port), "master"], cwd=self._client_repo.path)
        self.assertObjectStoreEqual(
            self._source_repo.object_store, self._client_repo.object_store
        )

    def test_fetch_from_dulwich_issue_88_alternative(self):
        # likewise, but the case where the two repos have no common parent
        self._source_repo = self.import_repo("issue88_expect_ack_nak_other.export")
        self._client_repo = self.import_repo("issue88_expect_ack_nak_client.export")
        port = self._start_server(self._source_repo)

        # The commit must be absent before the fetch and present after.
        self.assertRaises(
            KeyError,
            self._client_repo.get_object,
            b"02a14da1fc1fc13389bbf32f0af7d8899f2b2323",
        )
        run_git_or_fail(["fetch", self.url(port), "master"], cwd=self._client_repo.path)
        self.assertEqual(
            b"commit",
            self._client_repo.get_object(
                b"02a14da1fc1fc13389bbf32f0af7d8899f2b2323"
            ).type_name,
        )

    def test_push_to_dulwich_issue_88_standard(self):
        # Same thing, but we reverse the role of the server/client
        # and do a push instead.
        self._source_repo = self.import_repo("issue88_expect_ack_nak_client.export")
        self._client_repo = self.import_repo("issue88_expect_ack_nak_server.export")
        port = self._start_server(self._source_repo)

        run_git_or_fail(["push", self.url(port), "master"], cwd=self._client_repo.path)
        self.assertReposEqual(self._source_repo, self._client_repo)
+
+
+# TODO(dborowitz): Come up with a better way of testing various permutations of
+# capabilities. The only reason it is the way it is now is that side-band-64k
+# was only recently introduced into git-receive-pack.
class NoSideBand64kReceivePackHandler(ReceivePackHandler):
    """ReceivePackHandler that does not support side-band-64k."""

    @classmethod
    def capabilities(cls):
        # Advertise everything the stock handler does, minus side-band-64k.
        caps = list(ReceivePackHandler.capabilities())
        while CAPABILITY_SIDE_BAND_64K in caps:
            caps.remove(CAPABILITY_SIDE_BAND_64K)
        return caps
+
+
def ignore_error(error):
    """Check whether this error is safe to ignore.

    Args:
      error: ``(type, value, traceback)`` triple as produced by
        ``sys.exc_info()``.
    Returns: True for connection-reset / broken-pipe socket errors, which
      are expected noise when a client disconnects early.
    """
    (e_type, e_value, e_tb) = error
    # ``socket.error`` is an alias of OSError on Python 3, where the error
    # number lives on the ``errno`` attribute.  The old Python 2 idiom of
    # indexing the exception (``e_value[0]``) raises TypeError on Python 3.
    return issubclass(e_type, socket.error) and getattr(e_value, "errno", None) in (
        errno.ECONNRESET,
        errno.EPIPE,
    )
blob - /dev/null
blob + e86d392824c1e4f8f0cccfe7e987ea9a37eb545d (mode 644)
--- /dev/null
+++ tests/compat/test_client.py
+# test_client.py -- Compatibility tests for git client.
+# Copyright (C) 2010 Google, Inc.
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Compatibility tests between the Dulwich client and the cgit server."""
+
+import copy
+import http.server
+import os
+import select
+import signal
+import stat
+import subprocess
+import sys
+import tarfile
+import tempfile
+import threading
+from contextlib import suppress
+from io import BytesIO
+from urllib.parse import unquote
+
+from dulwich import client, file, index, objects, protocol, repo
+
+from .. import SkipTest, expectedFailure
+from .utils import (
+ _DEFAULT_GIT,
+ CompatTestCase,
+ check_for_daemon,
+ import_repo_to_dir,
+ rmtree_ro,
+ run_git_or_fail,
+)
+
+if sys.platform == "win32":
+ import ctypes
+
+
class DulwichClientTestBase:
    """Tests for client/server compatibility.

    Mixin: concrete subclasses supply the transport via ``_client`` /
    ``_build_path`` and combine this class with CompatTestCase.
    """

    def setUp(self):
        # gitroot is the parent directory holding both the imported
        # fixture repo and the bare "dest" repo created below.
        self.gitroot = os.path.dirname(
            import_repo_to_dir("server_new.export").rstrip(os.sep)
        )
        self.dest = os.path.join(self.gitroot, "dest")
        file.ensure_dir_exists(self.dest)
        run_git_or_fail(["init", "--quiet", "--bare"], cwd=self.dest)

    def tearDown(self):
        # rmtree_ro also clears read-only attributes (needed on Windows).
        rmtree_ro(self.gitroot)

    def assertDestEqualsSrc(self):
        """Assert that the "dest" repo now mirrors server_new.export."""
        repo_dir = os.path.join(self.gitroot, "server_new.export")
        dest_repo_dir = os.path.join(self.gitroot, "dest")
        with repo.Repo(repo_dir) as src:
            with repo.Repo(dest_repo_dir) as dest:
                self.assertReposEqual(src, dest)

    def _client(self):
        # Supplied by the transport-specific subclass.
        raise NotImplementedError

    def _build_path(self):
        # Supplied by the transport-specific subclass.
        raise NotImplementedError

    def _do_send_pack(self):
        # Push all refs except HEAD from the fixture repo to "dest".
        c = self._client()
        srcpath = os.path.join(self.gitroot, "server_new.export")
        with repo.Repo(srcpath) as src:
            sendrefs = dict(src.get_refs())
            del sendrefs[b"HEAD"]
            c.send_pack(
                self._build_path("/dest"),
                lambda _: sendrefs,
                src.generate_pack_data,
            )

    def test_send_pack(self):
        """A plain send_pack replicates the source repo."""
        self._do_send_pack()
        self.assertDestEqualsSrc()

    def test_send_pack_nothing_to_send(self):
        """Re-pushing with nothing new must not raise."""
        self._do_send_pack()
        self.assertDestEqualsSrc()
        # nothing to send, but shouldn't raise either.
        self._do_send_pack()

    @staticmethod
    def _add_file(repo, tree_id, filename, contents):
        """Add a blob to the repo's tree and return the new tree id."""
        tree = repo[tree_id]
        blob = objects.Blob()
        blob.data = contents.encode("utf-8")
        repo.object_store.add_object(blob)
        tree.add(filename.encode("utf-8"), stat.S_IFREG | 0o644, blob.id)
        repo.object_store.add_object(tree)
        return tree.id

    def test_send_pack_from_shallow_clone(self):
        """Commits made on top of a depth-1 clone can be pushed back."""
        c = self._client()
        server_new_path = os.path.join(self.gitroot, "server_new.export")
        run_git_or_fail(["config", "http.uploadpack", "true"], cwd=server_new_path)
        run_git_or_fail(["config", "http.receivepack", "true"], cwd=server_new_path)
        remote_path = self._build_path("/server_new.export")
        with repo.Repo(self.dest) as local:
            result = c.fetch(remote_path, local, depth=1)
            for r in result.refs.items():
                local.refs.set_if_equals(r[0], None, r[1])
            tree_id = local[local.head()].tree
            for filename, contents in [
                ("bar", "bar contents"),
                ("zop", "zop contents"),
            ]:
                tree_id = self._add_file(local, tree_id, filename, contents)
                commit_id = local.do_commit(
                    message=b"add " + filename.encode("utf-8"),
                    committer=b"Joe Example <joe@example.com>",
                    tree=tree_id,
                )
            sendrefs = dict(local.get_refs())
            del sendrefs[b"HEAD"]
            c.send_pack(remote_path, lambda _: sendrefs, local.generate_pack_data)
        with repo.Repo(server_new_path) as remote:
            self.assertEqual(remote.head(), commit_id)

    def test_send_without_report_status(self):
        """Pushing still works when report-status is not advertised."""
        c = self._client()
        c._send_capabilities.remove(b"report-status")
        srcpath = os.path.join(self.gitroot, "server_new.export")
        with repo.Repo(srcpath) as src:
            sendrefs = dict(src.get_refs())
            del sendrefs[b"HEAD"]
            c.send_pack(
                self._build_path("/dest"),
                lambda _: sendrefs,
                src.generate_pack_data,
            )
            self.assertDestEqualsSrc()

    def make_dummy_commit(self, dest):
        """Create a trivial commit (one blob "hi") in dest; return its id."""
        b = objects.Blob.from_string(b"hi")
        dest.object_store.add_object(b)
        t = index.commit_tree(dest.object_store, [(b"hi", b.id, 0o100644)])
        c = objects.Commit()
        c.author = c.committer = b"Foo Bar <foo@example.com>"
        c.author_time = c.commit_time = 0
        c.author_timezone = c.commit_timezone = 0
        c.message = b"hi"
        c.tree = t
        dest.object_store.add_object(c)
        return c.id

    def disable_ff_and_make_dummy_commit(self):
        # disable non-fast-forward pushes to the server
        dest = repo.Repo(os.path.join(self.gitroot, "dest"))
        run_git_or_fail(
            ["config", "receive.denyNonFastForwards", "true"], cwd=dest.path
        )
        commit_id = self.make_dummy_commit(dest)
        return dest, commit_id

    def compute_send(self, src):
        """Return (refs-to-send minus HEAD, pack data generator) for src."""
        sendrefs = dict(src.get_refs())
        del sendrefs[b"HEAD"]
        return sendrefs, src.generate_pack_data

    def test_send_pack_one_error(self):
        """A non-fast-forward ref is reported per-ref, others succeed."""
        dest, dummy_commit = self.disable_ff_and_make_dummy_commit()
        dest.refs[b"refs/heads/master"] = dummy_commit
        repo_dir = os.path.join(self.gitroot, "server_new.export")
        with repo.Repo(repo_dir) as src:
            sendrefs, gen_pack = self.compute_send(src)
            c = self._client()
            result = c.send_pack(
                self._build_path("/dest"), lambda _: sendrefs, gen_pack
            )
            self.assertEqual(
                {
                    b"refs/heads/branch": None,
                    b"refs/heads/master": "non-fast-forward",
                },
                result.ref_status,
            )

    def test_send_pack_multiple_errors(self):
        """Multiple rejected refs are all reported."""
        dest, dummy = self.disable_ff_and_make_dummy_commit()
        # set up for two non-ff errors
        branch, master = b"refs/heads/branch", b"refs/heads/master"
        dest.refs[branch] = dest.refs[master] = dummy
        repo_dir = os.path.join(self.gitroot, "server_new.export")
        with repo.Repo(repo_dir) as src:
            sendrefs, gen_pack = self.compute_send(src)
            c = self._client()
            result = c.send_pack(
                self._build_path("/dest"), lambda _: sendrefs, gen_pack
            )
            self.assertEqual(
                {branch: "non-fast-forward", master: "non-fast-forward"},
                result.ref_status,
            )

    def test_archive(self):
        """upload-archive produces a tarball of the HEAD tree."""
        c = self._client()
        f = BytesIO()
        c.archive(self._build_path("/server_new.export"), b"HEAD", f.write)
        f.seek(0)
        tf = tarfile.open(fileobj=f)
        self.assertEqual(["baz", "foo"], tf.getnames())

    def test_fetch_pack(self):
        """A plain fetch replicates the source repo into dest."""
        c = self._client()
        with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
            result = c.fetch(self._build_path("/server_new.export"), dest)
            for r in result.refs.items():
                dest.refs.set_if_equals(r[0], None, r[1])
            self.assertDestEqualsSrc()

    def test_fetch_pack_depth(self):
        """A depth-1 fetch records the expected shallow commits."""
        c = self._client()
        with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
            result = c.fetch(self._build_path("/server_new.export"), dest, depth=1)
            for r in result.refs.items():
                dest.refs.set_if_equals(r[0], None, r[1])
            self.assertEqual(
                dest.get_shallow(),
                {
                    b"35e0b59e187dd72a0af294aedffc213eaa4d03ff",
                    b"514dc6d3fbfe77361bcaef320c4d21b72bc10be9",
                },
            )

    def test_repeat(self):
        """A second fetch of the same repo also succeeds."""
        c = self._client()
        with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
            result = c.fetch(self._build_path("/server_new.export"), dest)
            for r in result.refs.items():
                dest.refs.set_if_equals(r[0], None, r[1])
            self.assertDestEqualsSrc()
            result = c.fetch(self._build_path("/server_new.export"), dest)
            for r in result.refs.items():
                dest.refs.set_if_equals(r[0], None, r[1])
            self.assertDestEqualsSrc()

    def test_fetch_empty_pack(self):
        """Fetching when everything is already present must not break."""
        c = self._client()
        with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
            result = c.fetch(self._build_path("/server_new.export"), dest)
            for r in result.refs.items():
                dest.refs.set_if_equals(r[0], None, r[1])
            self.assertDestEqualsSrc()

            def dw(refs, **kwargs):
                # Want every advertised ref (all already present locally).
                return list(refs.values())

            result = c.fetch(
                self._build_path("/server_new.export"),
                dest,
                determine_wants=dw,
            )
            for r in result.refs.items():
                dest.refs.set_if_equals(r[0], None, r[1])
            self.assertDestEqualsSrc()

    def test_incremental_fetch_pack(self):
        """Fetch into a repo that already has most of the history."""
        self.test_fetch_pack()
        dest, dummy = self.disable_ff_and_make_dummy_commit()
        dest.refs[b"refs/heads/master"] = dummy
        c = self._client()
        repo_dir = os.path.join(self.gitroot, "server_new.export")
        with repo.Repo(repo_dir) as dest:
            result = c.fetch(self._build_path("/dest"), dest)
            for r in result.refs.items():
                dest.refs.set_if_equals(r[0], None, r[1])
            self.assertDestEqualsSrc()

    def test_fetch_pack_no_side_band_64k(self):
        """Fetching still works with side-band-64k disabled client-side."""
        c = self._client()
        c._fetch_capabilities.remove(b"side-band-64k")
        with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
            result = c.fetch(self._build_path("/server_new.export"), dest)
            for r in result.refs.items():
                dest.refs.set_if_equals(r[0], None, r[1])
            self.assertDestEqualsSrc()

    def test_fetch_pack_zero_sha(self):
        # zero sha1s are already present on the client, and should
        # be ignored
        c = self._client()
        with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
            result = c.fetch(
                self._build_path("/server_new.export"),
                dest,
                lambda refs, **kwargs: [protocol.ZERO_SHA],
            )
            for r in result.refs.items():
                dest.refs.set_if_equals(r[0], None, r[1])

    def test_send_remove_branch(self):
        """Pushing a zero sha for a ref deletes it on the server."""
        with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
            dummy_commit = self.make_dummy_commit(dest)
            dest.refs[b"refs/heads/master"] = dummy_commit
            dest.refs[b"refs/heads/abranch"] = dummy_commit
            sendrefs = dict(dest.refs)
            sendrefs[b"refs/heads/abranch"] = b"00" * 20
            del sendrefs[b"HEAD"]

            def gen_pack(have, want, ofs_delta=False, progress=None):
                # Deleting a ref requires no pack contents.
                return 0, []

            c = self._client()
            self.assertEqual(dest.refs[b"refs/heads/abranch"], dummy_commit)
            c.send_pack(self._build_path("/dest"), lambda _: sendrefs, gen_pack)
            self.assertNotIn(b"refs/heads/abranch", dest.refs)

    def test_send_new_branch_empty_pack(self):
        """Creating a ref for an existing commit sends an empty pack."""
        with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
            dummy_commit = self.make_dummy_commit(dest)
            dest.refs[b"refs/heads/master"] = dummy_commit
            dest.refs[b"refs/heads/abranch"] = dummy_commit
            sendrefs = {b"refs/heads/bbranch": dummy_commit}

            def gen_pack(have, want, ofs_delta=False, progress=None):
                # The target commit already exists server-side.
                return 0, []

            c = self._client()
            self.assertEqual(dest.refs[b"refs/heads/abranch"], dummy_commit)
            c.send_pack(self._build_path("/dest"), lambda _: sendrefs, gen_pack)
            self.assertEqual(dummy_commit, dest.refs[b"refs/heads/abranch"])

    def test_get_refs(self):
        """get_refs matches the refs stored in the repo on disk."""
        c = self._client()
        refs = c.get_refs(self._build_path("/server_new.export"))

        repo_dir = os.path.join(self.gitroot, "server_new.export")
        with repo.Repo(repo_dir) as dest:
            self.assertDictEqual(dest.refs.as_dict(), refs)
+
+
class DulwichTCPClientTest(CompatTestCase, DulwichClientTestBase):
    """Client tests run against a real ``git daemon`` over TCP."""

    def setUp(self):
        CompatTestCase.setUp(self)
        DulwichClientTestBase.setUp(self)
        # Refuse to run if something else already listens on the git port.
        if check_for_daemon(limit=1):
            raise SkipTest(
                "git-daemon was already running on port %s" % protocol.TCP_GIT_PORT
            )
        fd, self.pidfile = tempfile.mkstemp(
            prefix="dulwich-test-git-client", suffix=".pid"
        )
        os.fdopen(fd).close()
        args = [
            _DEFAULT_GIT,
            "daemon",
            "--verbose",
            "--export-all",
            "--pid-file=%s" % self.pidfile,
            "--base-path=%s" % self.gitroot,
            "--enable=receive-pack",
            "--enable=upload-archive",
            "--listen=localhost",
            "--reuseaddr",
            self.gitroot,
        ]
        self.process = subprocess.Popen(
            args,
            cwd=self.gitroot,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        if not check_for_daemon():
            raise SkipTest("git-daemon failed to start")

    def tearDown(self):
        # Kill the daemon via the pid it wrote, then reap our child process.
        with open(self.pidfile) as f:
            pid = int(f.read().strip())
        if sys.platform == "win32":
            # No SIGKILL on Windows; terminate via the Win32 API instead.
            PROCESS_TERMINATE = 1
            handle = ctypes.windll.kernel32.OpenProcess(PROCESS_TERMINATE, False, pid)
            ctypes.windll.kernel32.TerminateProcess(handle, -1)
            ctypes.windll.kernel32.CloseHandle(handle)
        else:
            with suppress(OSError):
                os.kill(pid, signal.SIGKILL)
            os.unlink(self.pidfile)
        self.process.wait()
        self.process.stdout.close()
        self.process.stderr.close()
        DulwichClientTestBase.tearDown(self)
        CompatTestCase.tearDown(self)

    def _client(self):
        return client.TCPGitClient("localhost")

    def _build_path(self, path):
        # git daemon serves paths relative to --base-path.
        return path

    if sys.platform == "win32":

        @expectedFailure
        def test_fetch_pack_no_side_band_64k(self):
            DulwichClientTestBase.test_fetch_pack_no_side_band_64k(self)

        def test_send_remove_branch(self):
            # This test fails intermittently on my machine, probably due to some sort
            # of race condition. Probably also related to #1015
            self.skipTest("skip flaky test; see #1015")
+
+
class TestSSHVendor:
    """SSH vendor stand-in that runs the requested git command locally."""

    @staticmethod
    def run_command(
        host,
        command,
        username=None,
        port=None,
        password=None,
        key_filename=None,
    ):
        # e.g. "git-upload-pack '/tmp/repo'" becomes
        # argv ["git", "upload-pack", "/tmp/repo"].
        program, quoted_path = command.split(" ")
        argv = program.split("-", 1) + [quoted_path.replace("'", "")]
        child = subprocess.Popen(
            argv,
            bufsize=0,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        return client.SubprocessWrapper(child)
+
+
class DulwichMockSSHClientTest(CompatTestCase, DulwichClientTestBase):
    """Client tests run over a faked SSH transport (local subprocess)."""

    def setUp(self):
        CompatTestCase.setUp(self)
        DulwichClientTestBase.setUp(self)
        # Swap in the stub vendor; restored (module global) in tearDown.
        self.real_vendor = client.get_ssh_vendor
        client.get_ssh_vendor = TestSSHVendor

    def tearDown(self):
        DulwichClientTestBase.tearDown(self)
        CompatTestCase.tearDown(self)
        client.get_ssh_vendor = self.real_vendor

    def _client(self):
        return client.SSHGitClient("localhost")

    def _build_path(self, path):
        # The stub vendor runs locally, so paths must be absolute.
        return self.gitroot + path
+
+
class DulwichSubprocessClientTest(CompatTestCase, DulwichClientTestBase):
    """Client tests run by spawning git transport helpers as subprocesses."""

    def setUp(self):
        CompatTestCase.setUp(self)
        DulwichClientTestBase.setUp(self)

    def tearDown(self):
        DulwichClientTestBase.tearDown(self)
        CompatTestCase.tearDown(self)

    def _client(self):
        return client.SubprocessGitClient()

    def _build_path(self, path):
        # Subprocess transport operates on local absolute paths.
        return self.gitroot + path
+
+
class GitHTTPRequestHandler(http.server.SimpleHTTPRequestHandler):
    """HTTP Request handler that calls out to 'git http-backend'."""

    # Make rfile unbuffered -- we need to read one line and then pass
    # the rest to a subprocess, so we can't use buffered input.
    rbufsize = 0

    def do_POST(self):
        self.run_backend()

    def do_GET(self):
        self.run_backend()

    def send_head(self):
        return self.run_backend()

    def log_request(self, code="-", size="-"):
        # Let's be quiet, the test suite is noisy enough already
        pass

    def run_backend(self):
        """Call out to git http-backend."""
        # Based on CGIHTTPServer.CGIHTTPRequestHandler.run_cgi:
        # Copyright (c) 2001-2010 Python Software Foundation;
        # All Rights Reserved
        # Licensed under the Python Software Foundation License.
        rest = self.path
        # find an explicit query string, if present.
        i = rest.rfind("?")
        if i >= 0:
            rest, query = rest[:i], rest[i + 1 :]
        else:
            query = ""

        # Build a CGI environment for git http-backend.
        env = copy.deepcopy(os.environ)
        env["SERVER_SOFTWARE"] = self.version_string()
        env["SERVER_NAME"] = self.server.server_name
        env["GATEWAY_INTERFACE"] = "CGI/1.1"
        env["SERVER_PROTOCOL"] = self.protocol_version
        env["SERVER_PORT"] = str(self.server.server_port)
        env["GIT_PROJECT_ROOT"] = self.server.root_path
        env["GIT_HTTP_EXPORT_ALL"] = "1"
        env["REQUEST_METHOD"] = self.command
        uqrest = unquote(rest)
        env["PATH_INFO"] = uqrest
        env["SCRIPT_NAME"] = "/"
        if query:
            env["QUERY_STRING"] = query
        host = self.address_string()
        if host != self.client_address[0]:
            env["REMOTE_HOST"] = host
        env["REMOTE_ADDR"] = self.client_address[0]
        authorization = self.headers.get("authorization")
        if authorization:
            authorization = authorization.split()
            if len(authorization) == 2:
                import base64
                import binascii

                env["AUTH_TYPE"] = authorization[0]
                if authorization[0].lower() == "basic":
                    try:
                        # base64.decodestring was removed in Python 3.9 and
                        # only accepted bytes; decode explicitly via
                        # decodebytes and convert back to str for split().
                        authorization = base64.decodebytes(
                            authorization[1].encode("ascii")
                        ).decode("latin-1")
                    except (binascii.Error, UnicodeError):
                        pass
                    else:
                        authorization = authorization.split(":")
                        if len(authorization) == 2:
                            env["REMOTE_USER"] = authorization[0]
        # XXX REMOTE_IDENT
        content_type = self.headers.get("content-type")
        if content_type:
            env["CONTENT_TYPE"] = content_type
        length = self.headers.get("content-length")
        if length:
            env["CONTENT_LENGTH"] = length
        referer = self.headers.get("referer")
        if referer:
            env["HTTP_REFERER"] = referer
        accept = []
        for line in self.headers.getallmatchingheaders("accept"):
            if line[:1] in "\t\n\r ":
                # Continuation line of a folded header.
                accept.append(line.strip())
            else:
                accept = accept + line[7:].split(",")
        env["HTTP_ACCEPT"] = ",".join(accept)
        ua = self.headers.get("user-agent")
        if ua:
            env["HTTP_USER_AGENT"] = ua
        co = self.headers.get("cookie")
        if co:
            env["HTTP_COOKIE"] = co
        # XXX Other HTTP_* headers
        # Since we're setting the env in the parent, provide empty
        # values to override previously set values
        for k in (
            "QUERY_STRING",
            "REMOTE_HOST",
            "CONTENT_LENGTH",
            "HTTP_USER_AGENT",
            "HTTP_COOKIE",
            "HTTP_REFERER",
        ):
            env.setdefault(k, "")

        self.wfile.write(b"HTTP/1.1 200 Script output follows\r\n")
        self.wfile.write(("Server: %s\r\n" % self.server.server_name).encode("ascii"))
        self.wfile.write(("Date: %s\r\n" % self.date_time_string()).encode("ascii"))

        decoded_query = query.replace("+", " ")

        try:
            nbytes = int(length)
        except (TypeError, ValueError):
            nbytes = -1
        if self.command.lower() == "post":
            if nbytes > 0:
                data = self.rfile.read(nbytes)
            elif self.headers.get("transfer-encoding") == "chunked":
                # Reassemble a chunked request body by hand; each chunk is
                # "<hex length>\r\n<payload>\r\n", terminated by a 0 chunk.
                chunks = []
                while True:
                    line = self.rfile.readline()
                    length = int(line.rstrip(), 16)
                    chunk = self.rfile.read(length + 2)
                    chunks.append(chunk[:-2])
                    if length == 0:
                        break
                data = b"".join(chunks)
                env["CONTENT_LENGTH"] = str(len(data))
            else:
                raise AssertionError
        else:
            data = None
            env["CONTENT_LENGTH"] = "0"
            # throw away additional data [see bug #427345]
            # NOTE: rfile is an unbuffered SocketIO here (rbufsize = 0),
            # which exposes the underlying socket as ``_sock``.
            while select.select([self.rfile._sock], [], [], 0)[0]:
                if not self.rfile._sock.recv(1):
                    break
        args = ["http-backend"]
        if "=" not in decoded_query:
            args.append(decoded_query)
        stdout = run_git_or_fail(args, input=data, env=env, stderr=subprocess.PIPE)
        self.wfile.write(stdout)
+
+
class HTTPGitServer(http.server.HTTPServer):
    """HTTP server rooted at a git directory, driven by GitHTTPRequestHandler."""

    allow_reuse_address = True

    def __init__(self, server_address, root_path) -> None:
        super().__init__(server_address, GitHTTPRequestHandler)
        self.root_path = root_path
        self.server_name = "localhost"

    def get_url(self):
        # Base URL clients should use to reach this server.
        return "http://{}:{}/".format(self.server_name, self.server_port)
+
+
class DulwichHttpClientTest(CompatTestCase, DulwichClientTestBase):
    """Client tests run against git http-backend behind a local HTTP server."""

    # http-backend smart HTTP support requires at least this git version.
    min_git_version = (1, 7, 0, 2)

    def setUp(self):
        CompatTestCase.setUp(self)
        DulwichClientTestBase.setUp(self)
        # Port 0: let the OS pick a free port; serve in a background thread.
        self._httpd = HTTPGitServer(("localhost", 0), self.gitroot)
        self.addCleanup(self._httpd.shutdown)
        threading.Thread(target=self._httpd.serve_forever).start()
        run_git_or_fail(["config", "http.uploadpack", "true"], cwd=self.dest)
        run_git_or_fail(["config", "http.receivepack", "true"], cwd=self.dest)

    def tearDown(self):
        DulwichClientTestBase.tearDown(self)
        CompatTestCase.tearDown(self)
        self._httpd.shutdown()
        self._httpd.socket.close()

    def _client(self):
        return client.HttpGitClient(self._httpd.get_url())

    def _build_path(self, path):
        # Paths are resolved by the server relative to its root.
        return path

    def test_archive(self):
        raise SkipTest("exporting archives not supported over http")
blob - /dev/null
blob + d8b554d8aee7758d9701c4d632cb356347070c20 (mode 644)
--- /dev/null
+++ tests/compat/test_pack.py
+# test_pack.py -- Compatibility tests for git packs.
+# Copyright (C) 2010 Google, Inc.
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Compatibility tests for git packs."""
+
+import binascii
+import os
+import re
+import shutil
+import tempfile
+
+from dulwich.objects import Blob
+from dulwich.pack import write_pack
+
+from .. import SkipTest
+from ..test_pack import PackTests, a_sha, pack1_sha
+from .utils import require_git_version, run_git_or_fail
+
+_NON_DELTA_RE = re.compile(b"non delta: (?P<non_delta>\\d+) objects")
+
+
+def _git_verify_pack_object_list(output):
+ pack_shas = set()
+ for line in output.splitlines():
+ sha = line[:40]
+ try:
+ binascii.unhexlify(sha)
+ except (TypeError, binascii.Error):
+ continue # non-sha line
+ pack_shas.add(sha)
+ return pack_shas
+
+
+class TestPack(PackTests):
+ """Compatibility tests for reading and writing pack files."""
+
    def setUp(self):
        """Require git >= 1.5.0 and create a scratch dir for pack output."""
        require_git_version((1, 5, 0))
        super().setUp()
        self._tempdir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self._tempdir)
+
    def test_copy(self):
        """A pack written by dulwich passes ``git verify-pack``."""
        with self.get_pack(pack1_sha) as origpack:
            self.assertSucceeds(origpack.index.check)
            pack_path = os.path.join(self._tempdir, "Elch")
            write_pack(pack_path, origpack.pack_tuples())
            output = run_git_or_fail(["verify-pack", "-v", pack_path])
            orig_shas = {o.id for o in origpack.iterobjects()}
            self.assertEqual(orig_shas, _git_verify_pack_object_list(output))
+
+ def test_deltas_work(self):
+ with self.get_pack(pack1_sha) as orig_pack:
+ orig_blob = orig_pack[a_sha]
+ new_blob = Blob()
+ new_blob.data = orig_blob.data + b"x"
+ all_to_pack = [(o, None) for o in orig_pack.iterobjects()] + [
+ (new_blob, None)
+ ]
+ pack_path = os.path.join(self._tempdir, "pack_with_deltas")
+ write_pack(pack_path, all_to_pack, deltify=True)
+ output = run_git_or_fail(["verify-pack", "-v", pack_path])
+ self.assertEqual(
+ {x[0].id for x in all_to_pack},
+ _git_verify_pack_object_list(output),
+ )
+ # We specifically made a new blob that should be a delta
+ # against the blob a_sha, so make sure we really got only 3
+ # non-delta objects:
+ got_non_delta = int(_NON_DELTA_RE.search(output).group("non_delta"))
+ self.assertEqual(
+ 3,
+ got_non_delta,
+ "Expected 3 non-delta objects, got %d" % got_non_delta,
+ )
+
+ def test_delta_medium_object(self):
+ # This tests an object set that will have a copy operation
+ # 2**20 in size.
+ with self.get_pack(pack1_sha) as orig_pack:
+ orig_blob = orig_pack[a_sha]
+ new_blob = Blob()
+ new_blob.data = orig_blob.data + (b"x" * 2**20)
+ new_blob_2 = Blob()
+ new_blob_2.data = new_blob.data + b"y"
+ all_to_pack = [
+ *list(orig_pack.pack_tuples()),
+ (new_blob, None),
+ (new_blob_2, None),
+ ]
+ pack_path = os.path.join(self._tempdir, "pack_with_deltas")
+ write_pack(pack_path, all_to_pack, deltify=True)
+ output = run_git_or_fail(["verify-pack", "-v", pack_path])
+ self.assertEqual(
+ {x[0].id for x in all_to_pack},
+ _git_verify_pack_object_list(output),
+ )
+ # We specifically made a new blob that should be a delta
+ # against the blob a_sha, so make sure we really got only 3
+ # non-delta objects:
+ got_non_delta = int(_NON_DELTA_RE.search(output).group("non_delta"))
+ self.assertEqual(
+ 3,
+ got_non_delta,
+ "Expected 3 non-delta objects, got %d" % got_non_delta,
+ )
+ # We expect one object to have a delta chain length of two
+ # (new_blob_2), so let's verify that actually happens:
+ self.assertIn(b"chain length = 2", output)
+
+ # This test is SUPER slow: over 80 seconds on a 2012-era
+ # laptop. This is because SequenceMatcher is worst-case quadratic
+ # on the input size. It's impractical to produce deltas for
+ # objects this large, but it's still worth doing the right thing
+ # when it happens.
+ def test_delta_large_object(self):
+ # This tests an object set that will have a copy operation
+ # 2**25 in size. This is a copy large enough that it requires
+ # two copy operations in git's binary delta format.
+ raise SkipTest("skipping slow, large test")
+ with self.get_pack(pack1_sha) as orig_pack:
+ new_blob = Blob()
+ new_blob.data = "big blob" + ("x" * 2**25)
+ new_blob_2 = Blob()
+ new_blob_2.data = new_blob.data + "y"
+ all_to_pack = [
+ *list(orig_pack.pack_tuples()),
+ (new_blob, None),
+ (new_blob_2, None),
+ ]
+ pack_path = os.path.join(self._tempdir, "pack_with_deltas")
+ write_pack(pack_path, all_to_pack, deltify=True)
+ output = run_git_or_fail(["verify-pack", "-v", pack_path])
+ self.assertEqual(
+ {x[0].id for x in all_to_pack},
+ _git_verify_pack_object_list(output),
+ )
+ # We specifically made a new blob that should be a delta
+ # against the blob a_sha, so make sure we really got only 4
+ # non-delta objects:
+ got_non_delta = int(_NON_DELTA_RE.search(output).group("non_delta"))
+ self.assertEqual(
+ 4,
+ got_non_delta,
+ "Expected 4 non-delta objects, got %d" % got_non_delta,
+ )
blob - /dev/null
blob + 31e155642452e866ea40125fc56c842dcbd1eb41 (mode 644)
--- /dev/null
+++ tests/compat/test_patch.py
+# test_patch.py -- test patch compatibility with CGit
+# Copyright (C) 2019 Boris Feld <boris@comet.ml>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests related to patch compatibility with CGit."""
+
+import os
+import shutil
+import tempfile
+from io import BytesIO
+
+from dulwich import porcelain
+from dulwich.repo import Repo
+
+from .utils import CompatTestCase, run_git_or_fail
+
+
+class CompatPatchTestCase(CompatTestCase):
+    """Check that a diff produced by dulwich can be applied by C git."""
+
+    def setUp(self):
+        super().setUp()
+        self.test_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, self.test_dir)
+        self.repo_path = os.path.join(self.test_dir, "repo")
+        self.repo = Repo.init(self.repo_path, mkdir=True)
+        self.addCleanup(self.repo.close)
+
+    def test_patch_apply(self):
+        # Prepare the repository
+
+        # Create some files and commit them
+        file_list = ["to_exists", "to_modify", "to_delete"]
+        for file in file_list:
+            file_path = os.path.join(self.repo_path, file)
+
+            # Touch the files
+            with open(file_path, "w"):
+                pass
+
+        self.repo.stage(file_list)
+
+        first_commit = self.repo.do_commit(b"The first commit")
+
+        # Make a copy of the repository so we can apply the diff later
+        copy_path = os.path.join(self.test_dir, "copy")
+        shutil.copytree(self.repo_path, copy_path)
+
+        # Do some changes
+        with open(os.path.join(self.repo_path, "to_modify"), "w") as f:
+            f.write("Modified!")
+
+        os.remove(os.path.join(self.repo_path, "to_delete"))
+
+        with open(os.path.join(self.repo_path, "to_add"), "w"):
+            pass
+
+        self.repo.stage(["to_modify", "to_delete", "to_add"])
+
+        second_commit = self.repo.do_commit(b"The second commit")
+
+        # Get the patch
+        first_tree = self.repo[first_commit].tree
+        second_tree = self.repo[second_commit].tree
+
+        outstream = BytesIO()
+        porcelain.diff_tree(
+            self.repo.path, first_tree, second_tree, outstream=outstream
+        )
+
+        # Save it on disk
+        patch_path = os.path.join(self.test_dir, "patch.patch")
+        with open(patch_path, "wb") as patch:
+            patch.write(outstream.getvalue())
+
+        # And try to apply it to the copy directory
+        git_command = ["-C", copy_path, "apply", patch_path]
+        run_git_or_fail(git_command)
+
+        # And now check that the files contents are exactly the same between
+        # the two repositories
+        original_files = set(os.listdir(self.repo_path))
+        new_files = set(os.listdir(copy_path))
+
+        # Check that we have the exact same files in both repositories
+        self.assertEqual(original_files, new_files)
+
+        for file in original_files:
+            if file == ".git":
+                continue
+
+            original_file_path = os.path.join(self.repo_path, file)
+            copy_file_path = os.path.join(copy_path, file)
+
+            self.assertTrue(os.path.isfile(copy_file_path))
+
+            with open(original_file_path, "rb") as original_file:
+                original_content = original_file.read()
+
+            with open(copy_file_path, "rb") as copy_file:
+                copy_content = copy_file.read()
+
+            self.assertEqual(original_content, copy_content)
blob - /dev/null
blob + 5f81e137166827f3e0c0019e93c17d4972c548c4 (mode 644)
--- /dev/null
+++ tests/compat/test_porcelain.py
+# test_porcelain.py -- Tests for dulwich.porcelain/CGit compatibility
+# Copyright (C) 2010 Google, Inc.
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Compatibility tests for dulwich.porcelain."""
+
+import os
+import platform
+import sys
+from unittest import skipIf
+
+from dulwich import porcelain
+
+from ..test_porcelain import PorcelainGpgTestCase
+from ..utils import build_commit_graph
+from .utils import CompatTestCase, run_git_or_fail
+
+
+@skipIf(
+    platform.python_implementation() == "PyPy" or sys.platform == "win32",
+    "gpgme not easily available or supported on Windows and PyPy",
+)
+class TagCreateSignTestCase(PorcelainGpgTestCase, CompatTestCase):
+    """Round-trip GPG-signed tags between dulwich and C git."""
+
+    def setUp(self):
+        super().setUp()
+
+    def test_sign(self):
+        # Test that dulwich signatures can be verified by CGit
+        c1, c2, c3 = build_commit_graph(
+            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
+        )
+        self.repo.refs[b"HEAD"] = c3.id
+        cfg = self.repo.get_config()
+        cfg.set(("user",), "signingKey", PorcelainGpgTestCase.DEFAULT_KEY_ID)
+        self.import_default_key()
+
+        porcelain.tag_create(
+            self.repo.path,
+            b"tryme",
+            b"foo <foo@bar.com>",
+            b"bar",
+            annotated=True,
+            sign=True,
+        )
+
+        # run_git_or_fail fails the test if `git tag -v` rejects the signature.
+        run_git_or_fail(
+            [f"--git-dir={self.repo.controldir()}", "tag", "-v", "tryme"],
+            env={"GNUPGHOME": os.environ["GNUPGHOME"]},
+        )
+
+    def test_verify(self):
+        # Test that CGit signatures can be verified by dulwich
+        c1, c2, c3 = build_commit_graph(
+            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
+        )
+        self.repo.refs[b"HEAD"] = c3.id
+        self.import_default_key()
+
+        run_git_or_fail(
+            [
+                f"--git-dir={self.repo.controldir()}",
+                "tag",
+                "-u",
+                PorcelainGpgTestCase.DEFAULT_KEY_ID,
+                "-m",
+                "foo",
+                "verifyme",
+            ],
+            env={
+                "GNUPGHOME": os.environ["GNUPGHOME"],
+                "GIT_COMMITTER_NAME": "Joe Example",
+                "GIT_COMMITTER_EMAIL": "joe@example.com",
+            },
+        )
+        tag = self.repo[b"refs/tags/verifyme"]
+        self.assertNotEqual(tag.signature, None)
+        tag.verify()
blob - /dev/null
blob + a63bac7adaa6d5d24e4276acc13e9aee8e502824 (mode 644)
--- /dev/null
+++ tests/compat/test_repository.py
+# test_repository.py -- Git repo compatibility tests
+# Copyright (C) 2010 Google, Inc.
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Compatibility tests for dulwich repositories."""
+
+import os
+import tempfile
+from io import BytesIO
+from itertools import chain
+
+from dulwich.objects import hex_to_sha
+from dulwich.repo import Repo, check_ref_format
+
+from .utils import CompatTestCase, require_git_version, rmtree_ro, run_git_or_fail
+
+
+class ObjectStoreTestCase(CompatTestCase):
+    """Tests for git repository compatibility."""
+
+    def setUp(self):
+        super().setUp()
+        self._repo = self.import_repo("server_new.export")
+
+    def _run_git(self, args):
+        """Run git with args inside the test repository, failing on error."""
+        return run_git_or_fail(args, cwd=self._repo.path)
+
+    def _parse_refs(self, output):
+        """Parse `git for-each-ref` output into {refname: (type_name, sha)}."""
+        refs = {}
+        for line in BytesIO(output):
+            fields = line.rstrip(b"\n").split(b" ")
+            self.assertEqual(3, len(fields))
+            refname, type_name, sha = fields
+            check_ref_format(refname[5:])
+            hex_to_sha(sha)
+            refs[refname] = (type_name, sha)
+        return refs
+
+    def _parse_objects(self, output):
+        """Parse `git rev-list --objects` output into a set of SHA bytes."""
+        return {s.rstrip(b"\n").split(b" ")[0] for s in BytesIO(output)}
+
+    def test_bare(self):
+        self.assertTrue(self._repo.bare)
+        self.assertFalse(os.path.exists(os.path.join(self._repo.path, ".git")))
+
+    def test_head(self):
+        output = self._run_git(["rev-parse", "HEAD"])
+        head_sha = output.rstrip(b"\n")
+        hex_to_sha(head_sha)
+        self.assertEqual(head_sha, self._repo.refs[b"HEAD"])
+
+    def test_refs(self):
+        output = self._run_git(
+            ["for-each-ref", "--format=%(refname) %(objecttype) %(objectname)"]
+        )
+        expected_refs = self._parse_refs(output)
+
+        actual_refs = {}
+        for refname, sha in self._repo.refs.as_dict().items():
+            if refname == b"HEAD":
+                continue  # handled in test_head
+            obj = self._repo[sha]
+            self.assertEqual(sha, obj.id)
+            actual_refs[refname] = (obj.type_name, obj.id)
+        self.assertEqual(expected_refs, actual_refs)
+
+    # TODO(dborowitz): peeled ref tests
+
+    def _get_loose_shas(self):
+        """Return SHAs of all loose (unpacked) objects, per C git."""
+        output = self._run_git(["rev-list", "--all", "--objects", "--unpacked"])
+        return self._parse_objects(output)
+
+    def _get_all_shas(self):
+        """Return SHAs of all reachable objects, per C git."""
+        output = self._run_git(["rev-list", "--all", "--objects"])
+        return self._parse_objects(output)
+
+    def assertShasMatch(self, expected_shas, actual_shas_iter):
+        """Assert each iterated SHA resolves and the set equals expected_shas."""
+        actual_shas = set()
+        for sha in actual_shas_iter:
+            obj = self._repo[sha]
+            self.assertEqual(sha, obj.id)
+            actual_shas.add(sha)
+        self.assertEqual(expected_shas, actual_shas)
+
+    def test_loose_objects(self):
+        # TODO(dborowitz): This is currently not very useful since
+        # fast-imported repos only contained packed objects.
+        expected_shas = self._get_loose_shas()
+        self.assertShasMatch(
+            expected_shas, self._repo.object_store._iter_loose_objects()
+        )
+
+    def test_packed_objects(self):
+        expected_shas = self._get_all_shas() - self._get_loose_shas()
+        self.assertShasMatch(
+            expected_shas, chain.from_iterable(self._repo.object_store.packs)
+        )
+
+    def test_all_objects(self):
+        expected_shas = self._get_all_shas()
+        self.assertShasMatch(expected_shas, iter(self._repo.object_store))
+
+
+class WorkingTreeTestCase(ObjectStoreTestCase):
+    """Test for compatibility with git-worktree."""
+
+    min_git_version = (2, 5, 0)
+
+    def create_new_worktree(self, repo_dir, branch):
+        """Create a new worktree using git-worktree.
+
+        Args:
+          repo_dir: The directory of the main working tree.
+          branch: The branch or commit to checkout in the new worktree.
+
+        Returns: The path to the new working tree.
+        """
+        temp_dir = tempfile.mkdtemp()
+        run_git_or_fail(["worktree", "add", temp_dir, branch], cwd=repo_dir)
+        self.addCleanup(rmtree_ro, temp_dir)
+        return temp_dir
+
+    def setUp(self):
+        super().setUp()
+        self._worktree_path = self.create_new_worktree(self._repo.path, "branch")
+        self._worktree_repo = Repo(self._worktree_path)
+        self.addCleanup(self._worktree_repo.close)
+        self._mainworktree_repo = self._repo
+        self._number_of_working_tree = 2
+        # Point self._repo at the worktree so the inherited
+        # ObjectStoreTestCase tests exercise the worktree repository.
+        self._repo = self._worktree_repo
+
+    def test_refs(self):
+        super().test_refs()
+        # The worktree and the main repository share the same refs.
+        self.assertEqual(
+            self._mainworktree_repo.refs.allkeys(), self._repo.refs.allkeys()
+        )
+
+    def test_head_equality(self):
+        # Each worktree has its own HEAD.
+        self.assertNotEqual(
+            self._repo.refs[b"HEAD"], self._mainworktree_repo.refs[b"HEAD"]
+        )
+
+    def test_bare(self):
+        # A worktree is not bare and has a .git *file* (not a directory).
+        self.assertFalse(self._repo.bare)
+        self.assertTrue(os.path.isfile(os.path.join(self._repo.path, ".git")))
+
+    def _parse_worktree_list(self, output):
+        """Parse `git worktree list` output into a list of str tuples."""
+        worktrees = []
+        for line in BytesIO(output):
+            fields = line.rstrip(b"\n").split()
+            worktrees.append(tuple(f.decode() for f in fields))
+        return worktrees
+
+    def test_git_worktree_list(self):
+        # 'git worktree list' was introduced in 2.7.0
+        require_git_version((2, 7, 0))
+        output = run_git_or_fail(["worktree", "list"], cwd=self._repo.path)
+        worktrees = self._parse_worktree_list(output)
+        self.assertEqual(len(worktrees), self._number_of_working_tree)
+        self.assertEqual(worktrees[0][1], "(bare)")
+        self.assertTrue(os.path.samefile(worktrees[0][0], self._mainworktree_repo.path))
+
+        output = run_git_or_fail(["worktree", "list"], cwd=self._mainworktree_repo.path)
+        worktrees = self._parse_worktree_list(output)
+        self.assertEqual(len(worktrees), self._number_of_working_tree)
+        self.assertEqual(worktrees[0][1], "(bare)")
+        self.assertTrue(os.path.samefile(worktrees[0][0], self._mainworktree_repo.path))
+
+    def test_git_worktree_config(self):
+        """Test that git worktree config parsing matches the git CLI's behavior."""
+        # Set some config value in the main repo using the git CLI
+        require_git_version((2, 7, 0))
+        test_name = "Jelmer"
+        test_email = "jelmer@apache.org"
+        run_git_or_fail(["config", "user.name", test_name], cwd=self._repo.path)
+        run_git_or_fail(["config", "user.email", test_email], cwd=self._repo.path)
+
+        worktree_cfg = self._worktree_repo.get_config()
+        main_cfg = self._repo.get_config()
+
+        # Assert that both the worktree repo and main repo have the same view of the config,
+        # and that the config matches what we set with the git cli
+        self.assertEqual(worktree_cfg, main_cfg)
+        for c in [worktree_cfg, main_cfg]:
+            self.assertEqual(test_name.encode(), c.get((b"user",), b"name"))
+            self.assertEqual(test_email.encode(), c.get((b"user",), b"email"))
+
+        # Read the config values in the worktree with the git cli and assert they match
+        # the dulwich-parsed configs
+        output_name = (
+            run_git_or_fail(["config", "user.name"], cwd=self._mainworktree_repo.path)
+            .decode()
+            .rstrip("\n")
+        )
+        output_email = (
+            run_git_or_fail(["config", "user.email"], cwd=self._mainworktree_repo.path)
+            .decode()
+            .rstrip("\n")
+        )
+        self.assertEqual(test_name, output_name)
+        self.assertEqual(test_email, output_email)
+
+
+class InitNewWorkingDirectoryTestCase(WorkingTreeTestCase):
+    """Test compatibility of Repo.init_new_working_directory."""
+
+    min_git_version = (2, 5, 0)
+
+    def setUp(self):
+        super().setUp()
+        self._other_worktree = self._repo
+        worktree_repo_path = tempfile.mkdtemp()
+        self.addCleanup(rmtree_ro, worktree_repo_path)
+        self._repo = Repo._init_new_working_directory(
+            worktree_repo_path, self._mainworktree_repo
+        )
+        self.addCleanup(self._repo.close)
+        self._number_of_working_tree = 3
+
+    def test_head_equality(self):
+        # Unlike a `git worktree add` worktree, a new working directory
+        # created by dulwich shares HEAD with the main repository.
+        self.assertEqual(
+            self._repo.refs[b"HEAD"], self._mainworktree_repo.refs[b"HEAD"]
+        )
+
+    def test_bare(self):
+        self.assertFalse(self._repo.bare)
+        self.assertTrue(os.path.isfile(os.path.join(self._repo.path, ".git")))
blob - /dev/null
blob + a518a14316765262e9f25492fe41cc4585fa564d (mode 644)
--- /dev/null
+++ tests/compat/test_server.py
+# test_server.py -- Compatibility tests for git server.
+# Copyright (C) 2010 Google, Inc.
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Compatibility tests between Dulwich and the cgit server.
+
+Warning: these tests should be fairly stable, but when writing/debugging new
+ tests, deadlocks may freeze the test process such that it cannot be
+ Ctrl-C'ed. On POSIX systems, you can kill the tests with Ctrl-Z, "kill %".
+"""
+
+import os
+import sys
+import threading
+
+from dulwich.server import DictBackend, TCPGitServer
+
+from .. import skipIf
+from .server_utils import NoSideBand64kReceivePackHandler, ServerTests
+from .utils import CompatTestCase, require_git_version
+
+
+@skipIf(sys.platform == "win32", "Broken on windows, with very long fail time.")
+class GitServerTestCase(ServerTests, CompatTestCase):
+    """Tests for client/server compatibility.
+
+    This server test case does not use side-band-64k in git-receive-pack.
+    """
+
+    protocol = "git"
+
+    def _handlers(self):
+        """Use a receive-pack handler with side-band-64k disabled."""
+        return {b"git-receive-pack": NoSideBand64kReceivePackHandler}
+
+    def _check_server(self, dul_server):
+        """Sanity-check that side-band-64k is really absent from the server."""
+        receive_pack_handler_cls = dul_server.handlers[b"git-receive-pack"]
+        caps = receive_pack_handler_cls.capabilities()
+        self.assertNotIn(b"side-band-64k", caps)
+
+    def _start_server(self, repo):
+        """Start a TCP git server on an OS-assigned port; return the port."""
+        backend = DictBackend({b"/": repo})
+        dul_server = TCPGitServer(backend, b"localhost", 0, handlers=self._handlers())
+        self._check_server(dul_server)
+        self.addCleanup(dul_server.shutdown)
+        self.addCleanup(dul_server.server_close)
+        threading.Thread(target=dul_server.serve).start()
+        self._server = dul_server
+        _, port = self._server.socket.getsockname()
+        return port
+
+
+@skipIf(sys.platform == "win32", "Broken on windows, with very long fail time.")
+class GitServerSideBand64kTestCase(GitServerTestCase):
+    """Tests for client/server compatibility with side-band-64k support."""
+
+    # side-band-64k in git-receive-pack was introduced in git 1.7.0.2
+    min_git_version = (1, 7, 0, 2)
+
+    def setUp(self):
+        super().setUp()
+        # side-band-64k is broken in the windows client.
+        # https://github.com/msysgit/git/issues/101
+        # Fix has landed for the 1.9.3 release.
+        if os.name == "nt":
+            require_git_version((1, 9, 3))
+
+    def _handlers(self):
+        return None  # default handlers include side-band-64k
+
+    def _check_server(self, server):
+        """Sanity-check that side-band-64k is advertised by the server."""
+        receive_pack_handler_cls = server.handlers[b"git-receive-pack"]
+        caps = receive_pack_handler_cls.capabilities()
+        self.assertIn(b"side-band-64k", caps)
blob - /dev/null
blob + b172bb4a2924fe3ea907522df02769a2fba07263 (mode 644)
--- /dev/null
+++ tests/compat/test_utils.py
+# test_utils.py -- Tests for git compatibility utilities
+# Copyright (C) 2010 Google, Inc.
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for git compatibility utilities."""
+
+from .. import SkipTest, TestCase
+from . import utils
+
+
+class GitVersionTests(TestCase):
+    """Tests for git version parsing/requirements, with run_git stubbed out."""
+
+    def setUp(self):
+        super().setUp()
+        self._orig_run_git = utils.run_git
+        self._version_str = None  # tests can override to set stub version
+
+        def run_git(args, **unused_kwargs):
+            # Stub: only `git --version` is expected; return the canned string.
+            self.assertEqual(["--version"], args)
+            return 0, self._version_str, ""
+
+        utils.run_git = run_git
+
+    def tearDown(self):
+        super().tearDown()
+        # Restore the real run_git for other tests.
+        utils.run_git = self._orig_run_git
+
+    def test_git_version_none(self):
+        self._version_str = b"not a git version"
+        self.assertEqual(None, utils.git_version())
+
+    def test_git_version_3(self):
+        self._version_str = b"git version 1.6.6"
+        self.assertEqual((1, 6, 6, 0), utils.git_version())
+
+    def test_git_version_4(self):
+        self._version_str = b"git version 1.7.0.2"
+        self.assertEqual((1, 7, 0, 2), utils.git_version())
+
+    def test_git_version_extra(self):
+        self._version_str = b"git version 1.7.0.3.295.gd8fa2"
+        self.assertEqual((1, 7, 0, 3), utils.git_version())
+
+    def assertRequireSucceeds(self, required_version):
+        """Assert require_git_version accepts required_version (no SkipTest)."""
+        try:
+            utils.require_git_version(required_version)
+        except SkipTest:
+            self.fail()
+
+    def assertRequireFails(self, required_version):
+        """Assert require_git_version rejects required_version via SkipTest."""
+        self.assertRaises(SkipTest, utils.require_git_version, required_version)
+
+    def test_require_git_version(self):
+        try:
+            self._version_str = b"git version 1.6.6"
+            self.assertRequireSucceeds((1, 6, 6))
+            self.assertRequireSucceeds((1, 6, 6, 0))
+            self.assertRequireSucceeds((1, 6, 5))
+            self.assertRequireSucceeds((1, 6, 5, 99))
+            self.assertRequireFails((1, 7, 0))
+            self.assertRequireFails((1, 7, 0, 2))
+            self.assertRaises(ValueError, utils.require_git_version, (1, 6, 6, 0, 0))
+
+            self._version_str = b"git version 1.7.0.2"
+            self.assertRequireSucceeds((1, 6, 6))
+            self.assertRequireSucceeds((1, 6, 6, 0))
+            self.assertRequireSucceeds((1, 7, 0))
+            self.assertRequireSucceeds((1, 7, 0, 2))
+            self.assertRequireFails((1, 7, 0, 3))
+            self.assertRequireFails((1, 7, 1))
+        except SkipTest as e:
+            # This test is designed to catch all SkipTest exceptions.
+            self.fail("Test unexpectedly skipped: %s" % e)
blob - /dev/null
blob + 78f4e43ac0e6cd5bae9fcb2c3a955a6ff41f09a9 (mode 644)
--- /dev/null
+++ tests/compat/test_web.py
+# test_web.py -- Compatibility tests for the git web server.
+# Copyright (C) 2010 Google, Inc.
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Compatibility tests between Dulwich and the cgit HTTP server.
+
+Warning: these tests should be fairly stable, but when writing/debugging new
+ tests, deadlocks may freeze the test process such that it cannot be
+ Ctrl-C'ed. On POSIX systems, you can kill the tests with Ctrl-Z, "kill %".
+"""
+
+import sys
+import threading
+from typing import Tuple
+from wsgiref import simple_server
+
+from dulwich.server import DictBackend, ReceivePackHandler, UploadPackHandler
+from dulwich.web import (
+ HTTPGitApplication,
+ WSGIRequestHandlerLogger,
+ WSGIServerLogger,
+ make_wsgi_chain,
+)
+
+from .. import SkipTest, skipIf
+from .server_utils import NoSideBand64kReceivePackHandler, ServerTests
+from .utils import CompatTestCase
+
+
+@skipIf(sys.platform == "win32", "Broken on windows, with very long fail time.")
+class WebTests(ServerTests):
+    """Base tests for web server tests.
+
+    Contains utility and setUp/tearDown methods, but does not inherit from
+    TestCase so tests are not automatically run.
+    """
+
+    protocol = "http"
+
+    def _start_server(self, repo):
+        """Start a WSGI git HTTP server on an OS-assigned port; return the port."""
+        backend = DictBackend({"/": repo})
+        app = self._make_app(backend)
+        dul_server = simple_server.make_server(
+            "localhost",
+            0,
+            app,
+            server_class=WSGIServerLogger,
+            handler_class=WSGIRequestHandlerLogger,
+        )
+        self.addCleanup(dul_server.shutdown)
+        self.addCleanup(dul_server.server_close)
+        threading.Thread(target=dul_server.serve_forever).start()
+        self._server = dul_server
+        _, port = dul_server.socket.getsockname()
+        return port
+
+
+@skipIf(sys.platform == "win32", "Broken on windows, with very long fail time.")
+class SmartWebTestCase(WebTests, CompatTestCase):
+    """Test cases for smart HTTP server.
+
+    This server test case does not use side-band-64k in git-receive-pack.
+    """
+
+    min_git_version: Tuple[int, ...] = (1, 6, 6)
+
+    def _handlers(self):
+        """Use a receive-pack handler with side-band-64k disabled."""
+        return {b"git-receive-pack": NoSideBand64kReceivePackHandler}
+
+    def _check_app(self, app):
+        """Sanity-check that side-band-64k is really absent from the app."""
+        receive_pack_handler_cls = app.handlers[b"git-receive-pack"]
+        caps = receive_pack_handler_cls.capabilities()
+        self.assertNotIn(b"side-band-64k", caps)
+
+    def _make_app(self, backend):
+        """Build the WSGI chain and verify the base application's handlers."""
+        app = make_wsgi_chain(backend, handlers=self._handlers())
+        to_check = app
+        # peel back layers until we're at the base application
+        while not issubclass(to_check.__class__, HTTPGitApplication):
+            to_check = to_check.app
+        self._check_app(to_check)
+        return app
+
+
+def patch_capabilities(handler, caps_removed):
+    """Patch a handler's capabilities by specifying a list of them to be
+    removed, and return the original classmethod for restoration.
+
+    Args:
+      handler: Handler class whose `capabilities` classmethod is replaced.
+      caps_removed: Capabilities (byte strings) to filter out.
+    Returns: The original `capabilities` classmethod, for restoration.
+    """
+    original_capabilities = handler.capabilities
+    # Snapshot the filtered list now; the replacement ignores its cls arg.
+    filtered_capabilities = [
+        i for i in original_capabilities() if i not in caps_removed
+    ]
+
+    def capabilities(cls):
+        return filtered_capabilities
+
+    handler.capabilities = classmethod(capabilities)
+    return original_capabilities
+
+
+@skipIf(sys.platform == "win32", "Broken on windows, with very long fail time.")
+class SmartWebSideBand64kTestCase(SmartWebTestCase):
+    """Test cases for smart HTTP server with side-band-64k support."""
+
+    # side-band-64k in git-receive-pack was introduced in git 1.7.0.2
+    min_git_version = (1, 7, 0, 2)
+
+    def setUp(self):
+        # Temporarily strip no-done so only side-band-64k behavior is tested.
+        self.o_uph_cap = patch_capabilities(UploadPackHandler, (b"no-done",))
+        self.o_rph_cap = patch_capabilities(ReceivePackHandler, (b"no-done",))
+        super().setUp()
+
+    def tearDown(self):
+        super().tearDown()
+        # Restore the original capabilities classmethods.
+        UploadPackHandler.capabilities = self.o_uph_cap
+        ReceivePackHandler.capabilities = self.o_rph_cap
+
+    def _handlers(self):
+        return None  # default handlers include side-band-64k
+
+    def _check_app(self, app):
+        """Check side-band-64k is advertised and no-done was stripped."""
+        receive_pack_handler_cls = app.handlers[b"git-receive-pack"]
+        caps = receive_pack_handler_cls.capabilities()
+        self.assertIn(b"side-band-64k", caps)
+        self.assertNotIn(b"no-done", caps)
+
+
+class SmartWebSideBand64kNoDoneTestCase(SmartWebTestCase):
+ """Test cases for smart HTTP server with side-band-64k and no-done
+ support.
+ """
+
+ # no-done was introduced in git 1.7.4
+ min_git_version = (1, 7, 4)
+
+ def _handlers(self):
+ return None # default handlers include side-band-64k
+
+ def _check_app(self, app):
+ receive_pack_handler_cls = app.handlers[b"git-receive-pack"]
+ caps = receive_pack_handler_cls.capabilities()
+ self.assertIn(b"side-band-64k", caps)
+ self.assertIn(b"no-done", caps)
+
+
+@skipIf(sys.platform == "win32", "Broken on windows, with very long fail time.")
+class DumbWebTestCase(WebTests, CompatTestCase):
+    """Test cases for dumb HTTP server."""
+
+    def _make_app(self, backend):
+        return make_wsgi_chain(backend, dumb=True)
+
+    # The inherited push/shallow-clone tests are overridden below because the
+    # dumb HTTP protocol does not support those operations.
+
+    def test_push_to_dulwich(self):
+        # Note: remove this if dulwich implements dumb web pushing.
+        raise SkipTest("Dumb web pushing not supported.")
+
+    def test_push_to_dulwich_remove_branch(self):
+        # Note: remove this if dumb pushing is supported
+        raise SkipTest("Dumb web pushing not supported.")
+
+    def test_new_shallow_clone_from_dulwich(self):
+        # Note: remove this if C git and dulwich implement dumb web shallow
+        # clones.
+        raise SkipTest("Dumb web shallow cloning not supported.")
+
+    def test_shallow_clone_from_git_is_identical(self):
+        # Note: remove this if C git and dulwich implement dumb web shallow
+        # clones.
+        raise SkipTest("Dumb web shallow cloning not supported.")
+
+    def test_fetch_same_depth_into_shallow_clone_from_dulwich(self):
+        # Note: remove this if C git and dulwich implement dumb web shallow
+        # clones.
+        raise SkipTest("Dumb web shallow cloning not supported.")
+
+    def test_fetch_full_depth_into_shallow_clone_from_dulwich(self):
+        # Note: remove this if C git and dulwich implement dumb web shallow
+        # clones.
+        raise SkipTest("Dumb web shallow cloning not supported.")
+
+    def test_push_to_dulwich_issue_88_standard(self):
+        raise SkipTest("Dumb web pushing not supported.")
blob - /dev/null
blob + 69cecedee29b312ccf42c0278e87e79e5269f285 (mode 644)
--- /dev/null
+++ tests/compat/utils.py
+# utils.py -- Git compatibility utilities
+# Copyright (C) 2010 Google, Inc.
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Utilities for interacting with cgit."""
+
+import errno
+import functools
+import os
+import shutil
+import socket
+import stat
+import subprocess
+import sys
+import tempfile
+import time
+from typing import Tuple
+
+from dulwich.protocol import TCP_GIT_PORT
+from dulwich.repo import Repo
+
+from .. import SkipTest, TestCase
+
+_DEFAULT_GIT = "git"
+_VERSION_LEN = 4
+_REPOS_DATA_DIR = os.path.abspath(
+ os.path.join(
+ os.path.dirname(__file__), os.pardir, os.pardir, "testdata", "repos"
+ )
+)
+
+
+def git_version(git_path=_DEFAULT_GIT):
+    """Attempt to determine the version of git currently installed.
+
+    Args:
+      git_path: Path to the git executable; defaults to the version in
+        the system path.
+    Returns: A tuple of ints of the form (major, minor, point, sub-point), or
+      None if no git installation was found.
+    """
+    try:
+        output = run_git_or_fail(["--version"], git_path=git_path)
+    except OSError:
+        # git executable not found at git_path.
+        return None
+    version_prefix = b"git version "
+    if not output.startswith(version_prefix):
+        return None
+
+    parts = output[len(version_prefix) :].split(b".")
+    nums = []
+    for part in parts:
+        try:
+            nums.append(int(part))
+        except ValueError:
+            # Stop at the first non-numeric component (e.g. b"rc1").
+            break
+
+    # Pad with zeros so callers always get exactly _VERSION_LEN components.
+    while len(nums) < _VERSION_LEN:
+        nums.append(0)
+    return tuple(nums[:_VERSION_LEN])
+
+
+def require_git_version(required_version, git_path=_DEFAULT_GIT):
+    """Require git version >= version, or skip the calling test.
+
+    Args:
+      required_version: A tuple of ints of the form (major, minor, point,
+        sub-point); omitted components default to 0.
+      git_path: Path to the git executable; defaults to the version in
+        the system path.
+
+    Raises:
+      ValueError: if the required version tuple has too many parts.
+      SkipTest: if no suitable git version was found at the given path.
+    """
+    found_version = git_version(git_path=git_path)
+    if found_version is None:
+        raise SkipTest(f"Test requires git >= {required_version}, but c git not found")
+
+    if len(required_version) > _VERSION_LEN:
+        raise ValueError(
+            "Invalid version tuple %s, expected %i parts"
+            % (required_version, _VERSION_LEN)
+        )
+
+    required_version = list(required_version)
+    # NOTE(review): found_version always has exactly _VERSION_LEN parts
+    # (git_version pads it), so this loop never executes; the condition
+    # looks inverted (pad required_version up to len(found_version)?).
+    # The comparison below still yields the right answer because Python
+    # tuple comparison treats a shorter tuple as smaller when it is a
+    # prefix of the longer one -- TODO confirm intent.
+    while len(found_version) < len(required_version):
+        required_version.append(0)
+    required_version = tuple(required_version)
+
+    if found_version < required_version:
+        required_version = ".".join(map(str, required_version))
+        found_version = ".".join(map(str, found_version))
+        raise SkipTest(
+            f"Test requires git >= {required_version}, found {found_version}"
+        )
+
+
+def run_git(
+    args,
+    git_path=_DEFAULT_GIT,
+    input=None,
+    capture_stdout=False,
+    capture_stderr=False,
+    **popen_kwargs,
+):
+    """Run a git command.
+
+    Input is piped from the input parameter and output is sent to the standard
+    streams, unless capture_stdout is set.
+
+    Args:
+      args: A list of args to the git command.
+      git_path: Path to the git executable.
+      input: Input data to be sent to stdin.
+      capture_stdout: Whether to capture and return stdout.
+      capture_stderr: Whether to capture and return stderr.
+      popen_kwargs: Additional kwargs for subprocess.Popen;
+        stdin/stdout args are ignored.
+    Returns: A tuple of (returncode, stdout contents, stderr contents).
+      If capture_stdout is False, None will be returned as stdout contents.
+      If capture_stderr is False, None will be returned as stderr contents.
+
+    Raises:
+      OSError: if the git executable was not found.
+    """
+    # Run git under the C locale so output is stable for parsing; only PATH
+    # is inherited from the caller's environment.
+    env = popen_kwargs.pop("env", {})
+    env["LC_ALL"] = env["LANG"] = "C"
+    env["PATH"] = os.getenv("PATH")
+
+    args = [git_path, *args]
+    popen_kwargs["stdin"] = subprocess.PIPE
+    if capture_stdout:
+        popen_kwargs["stdout"] = subprocess.PIPE
+    else:
+        popen_kwargs.pop("stdout", None)
+    if capture_stderr:
+        # NOTE: this overrides any caller-supplied "stderr" popen kwarg.
+        popen_kwargs["stderr"] = subprocess.PIPE
+    else:
+        popen_kwargs.pop("stderr", None)
+    p = subprocess.Popen(args, env=env, **popen_kwargs)
+    stdout, stderr = p.communicate(input=input)
+    return (p.returncode, stdout, stderr)
+
+
+def run_git_or_fail(args, git_path=_DEFAULT_GIT, input=None, **popen_kwargs):
+    """Run a git command, capture stdout/stderr, and fail if git fails."""
+    if "stderr" not in popen_kwargs:
+        # NOTE(review): run_git() overwrites "stderr" with PIPE because
+        # capture_stderr=True below, so this STDOUT default appears to
+        # have no effect -- TODO confirm.
+        popen_kwargs["stderr"] = subprocess.STDOUT
+    returncode, stdout, stderr = run_git(
+        args,
+        git_path=git_path,
+        input=input,
+        capture_stdout=True,
+        capture_stderr=True,
+        **popen_kwargs,
+    )
+    if returncode != 0:
+        # Surface the full command and both streams in the failure message.
+        raise AssertionError(
+            "git with args %r failed with %d: stdout=%r stderr=%r"
+            % (args, returncode, stdout, stderr)
+        )
+    return stdout
+
+
+def import_repo_to_dir(name):
+    """Import a repo from a fast-export file in a temporary directory.
+
+    These are used rather than binary repos for compat tests because they are
+    more compact and human-editable, and we already depend on git.
+
+    Args:
+      name: The name of the repository export file, relative to
+        the testdata/repos directory (see _REPOS_DATA_DIR).
+    Returns: The path to the imported repository.
+    """
+    temp_dir = tempfile.mkdtemp()
+    export_path = os.path.join(_REPOS_DATA_DIR, name)
+    temp_repo_dir = os.path.join(temp_dir, name)
+    # NOTE(review): the export file is not closed if either git call below
+    # raises; a `with` block would be safer.
+    export_file = open(export_path, "rb")
+    run_git_or_fail(["init", "--quiet", "--bare", temp_repo_dir])
+    run_git_or_fail(["fast-import"], input=export_file.read(), cwd=temp_repo_dir)
+    export_file.close()
+    return temp_repo_dir
+
+
+def check_for_daemon(limit=10, delay=0.1, timeout=0.1, port=TCP_GIT_PORT):
+    """Check for a running TCP daemon.
+
+    Defaults to checking 10 times with a delay of 0.1 sec between tries.
+
+    Args:
+      limit: Number of attempts before deciding no daemon is running.
+      delay: Delay between connection attempts.
+      timeout: Socket timeout for connection attempts.
+      port: Port on which we expect the daemon to appear.
+    Returns: A boolean, true if a daemon is running on the specified port,
+      false if not.
+    """
+    for _ in range(limit):
+        time.sleep(delay)
+        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        # NOTE(review): `delay` is used as the socket timeout here; the
+        # `timeout` parameter is otherwise unused -- TODO confirm intent.
+        s.settimeout(delay)
+        try:
+            s.connect(("localhost", port))
+            return True
+        except socket.timeout:
+            pass
+        except OSError as e:
+            # Connection refused just means the daemon is not up yet;
+            # anything else is unexpected and re-raised.
+            if getattr(e, "errno", False) and e.errno != errno.ECONNREFUSED:
+                raise
+            elif e.args[0] != errno.ECONNREFUSED:
+                raise
+        finally:
+            s.close()
+    return False
+
+
+class CompatTestCase(TestCase):
+    """Test case that requires git for compatibility checks.
+
+    Subclasses can change the git version required by overriding
+    min_git_version.
+    """
+
+    # Minimum C git version; setUp() skips the test when the installed
+    # git is older than this.
+    min_git_version: Tuple[int, ...] = (1, 5, 0)
+
+    def setUp(self):
+        super().setUp()
+        require_git_version(self.min_git_version)
+
+    def assertObjectStoreEqual(self, store1, store2):
+        # Stores iterate over object SHAs; compare them as sets.
+        self.assertEqual(sorted(set(store1)), sorted(set(store2)))
+
+    def assertReposEqual(self, repo1, repo2):
+        # Equal refs plus equal object sets define repository equality here.
+        self.assertEqual(repo1.get_refs(), repo2.get_refs())
+        self.assertObjectStoreEqual(repo1.object_store, repo2.object_store)
+
+    def assertReposNotEqual(self, repo1, repo2):
+        refs1 = repo1.get_refs()
+        objs1 = set(repo1.object_store)
+        refs2 = repo2.get_refs()
+        objs2 = set(repo2.object_store)
+        self.assertFalse(refs1 == refs2 and objs1 == objs2)
+
+    def import_repo(self, name):
+        """Import a repo from a fast-export file in a temporary directory.
+
+        Args:
+          name: The name of the repository export file, relative to
+            the testdata/repos directory.
+        Returns: An initialized Repo object that lives in a temporary
+          directory.
+        """
+        path = import_repo_to_dir(name)
+        repo = Repo(path)
+
+        def cleanup():
+            # Close the repo before deleting the temp dir that contains it.
+            repo.close()
+            rmtree_ro(os.path.dirname(path.rstrip(os.sep)))
+
+        self.addCleanup(cleanup)
+        return repo
+
+
+if sys.platform == "win32":
+
+    def remove_ro(action, name, exc):
+        # shutil.rmtree onerror callback: Windows refuses to delete
+        # read-only files, so clear the read-only bit and retry.
+        os.chmod(name, stat.S_IWRITE)
+        os.remove(name)
+
+    # rmtree variant that also removes read-only files on Windows.
+    rmtree_ro = functools.partial(shutil.rmtree, onerror=remove_ro)
+else:
+    rmtree_ro = shutil.rmtree
blob - /dev/null
blob + 92b1e5b9499442ef432148240f1423966855248f (mode 644)
--- /dev/null
+++ tests/contrib/__init__.py
+# __init__.py -- Contrib module for Dulwich
+# Copyright (C) 2014 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+def test_suite():
+    """Load all tests.contrib.test_* modules as a unittest suite."""
+    import unittest
+
+    names = [
+        "paramiko_vendor",
+        "release_robot",
+        "swift",
+    ]
+    module_names = ["tests.contrib.test_" + name for name in names]
+    loader = unittest.TestLoader()
+    return loader.loadTestsFromNames(module_names)
blob - /dev/null
blob + 0e12e5d77de7ddb1279e9e70b29ef9d098e4b734 (mode 644)
--- /dev/null
+++ tests/contrib/test_paramiko_vendor.py
+# test_paramiko_vendor.py
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for paramiko_vendor."""
+
+import socket
+import threading
+from io import StringIO
+from unittest import skipIf
+
+from .. import TestCase
+
+try:
+ import paramiko
+except ImportError:
+ has_paramiko = False
+else:
+ has_paramiko = True
+ from dulwich.contrib.paramiko_vendor import ParamikoSSHVendor
+
+    class Server(paramiko.ServerInterface):
+        """http://docs.paramiko.org/en/2.4/api/server.html."""
+
+        def __init__(self, commands, *args, **kwargs) -> None:
+            super().__init__(*args, **kwargs)
+            # Executed commands are appended here for tests to inspect.
+            self.commands = commands
+
+        def check_channel_exec_request(self, channel, command):
+            self.commands.append(command)
+            return True
+
+        def check_auth_password(self, username, password):
+            if username == USER and password == PASSWORD:
+                return paramiko.AUTH_SUCCESSFUL
+            return paramiko.AUTH_FAILED
+
+        def check_auth_publickey(self, username, key):
+            # Only the test client key defined in CLIENT_KEY is accepted.
+            pubkey = paramiko.RSAKey.from_private_key(StringIO(CLIENT_KEY))
+            if username == USER and key == pubkey:
+                return paramiko.AUTH_SUCCESSFUL
+            return paramiko.AUTH_FAILED
+
+        def check_channel_request(self, kind, chanid):
+            if kind == "session":
+                return paramiko.OPEN_SUCCEEDED
+            return paramiko.OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED
+
+        def get_allowed_auths(self, username):
+            return "password,publickey"
+
+
+USER = "testuser"
+PASSWORD = "test"
+SERVER_KEY = """\
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAy/L1sSYAzxsMprtNXW4u/1jGXXkQmQ2xtmKVlR+RlIL3a1BH
+bzTpPlZyjltAAwzIP8XRh0iJFKz5y3zSQChhX47ZGN0NvQsVct8R+YwsUonwfAJ+
+JN0KBKKvC8fPHlzqBr3gX+ZxqsFH934tQ6wdQPH5eQWtdM8L826lMsH1737uyTGk
++mCSDjL3c6EzY83g7qhkJU2R4qbi6ne01FaWADzG8sOzXnHT+xpxtk8TTT8yCVUY
+MmBNsSoA/ka3iWz70ghB+6Xb0WpFJZXWq1oYovviPAfZGZSrxBZMxsWMye70SdLl
+TqsBEt0+miIcm9s0fvjWvQuhaHX6mZs5VO4r5QIDAQABAoIBAGYqeYWaYgFdrYLA
+hUrubUCg+g3NHdFuGL4iuIgRXl4lFUh+2KoOuWDu8Uf60iA1AQNhV0sLvQ/Mbv3O
+s4xMLisuZfaclctDiCUZNenqnDFkxEF7BjH1QJV94W5nU4wEQ3/JEmM4D2zYkfKb
+FJW33JeyH6TOgUvohDYYEU1R+J9V8qA243p+ui1uVtNI6Pb0TXJnG5y9Ny4vkSWH
+Fi0QoMPR1r9xJ4SEearGzA/crb4SmmDTKhGSoMsT3d5ATieLmwcS66xWz8w4oFGJ
+yzDq24s4Fp9ccNjMf/xR8XRiekJv835gjEqwF9IXyvgOaq6XJ1iCqGPFDKa25nui
+JnEstOkCgYEA/ZXk7aIanvdeJlTqpX578sJfCnrXLydzE8emk1b7+5mrzGxQ4/pM
+PBQs2f8glT3t0O0mRX9NoRqnwrid88/b+cY4NCOICFZeasX336/gYQxyVeRLJS6Z
+hnGEQqry8qS7PdKAyeHMNmZFrUh4EiHiObymEfQS+mkRUObn0cGBTw8CgYEAzeQU
+D2baec1DawjppKaRynAvWjp+9ry1lZx9unryKVRwjRjkEpw+b3/+hdaF1IvsVSce
+cNj+6W2guZ2tyHuPhZ64/4SJVyE2hKDSKD4xTb2nVjsMeN0bLD2UWXC9mwbx8nWa
+2tmtUZ7a/okQb2cSdosJinRewLNqXIsBXamT1csCgYEA0cXb2RCOQQ6U3dTFPx4A
+3vMXuA2iUKmrsqMoEx6T2LBow/Sefdkik1iFOdipVYwjXP+w9zC2QR1Rxez/DR/X
+8ymceNUjxPHdrSoTQQG29dFcC92MpDeGXQcuyA+uZjcLhbrLOzYEvsOfxBb87NMG
+14hNQPDNekTMREafYo9WrtUCgYAREK54+FVzcwf7fymedA/xb4r9N4v+d3W1iNsC
+8d3Qfyc1CrMct8aVB07ZWQaOr2pPRIbJY7L9NhD0UZVt4I/sy1MaGqonhqE2LP4+
+R6legDG2e/50ph7yc8gwAaA1kUXMiuLi8Nfkw/3yyvmJwklNegi4aRzRbA2Mzhi2
+4q9WMQKBgQCb0JNyxHG4pvLWCF/j0Sm1FfvrpnqSv5678n1j4GX7Ka/TubOK1Y4K
+U+Oib7dKa/zQMWehVFNTayrsq6bKVZ6q7zG+IHiRLw4wjeAxREFH6WUjDrn9vl2l
+D48DKbBuBwuVOJWyq3qbfgJXojscgNQklrsPdXVhDwOF0dYxP89HnA==
+-----END RSA PRIVATE KEY-----"""
+CLIENT_KEY = """\
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAxvREKSElPOm/0z/nPO+j5rk2tjdgGcGc7We1QZ6TRXYLu7nN
+GeEFIL4p8N1i6dmB+Eydt7xqCU79MWD6Yy4prFe1+/K1wCDUxIbFMxqQcX5zjJzd
+i8j8PbcaUlVhP/OkjtkSxrXaGDO1BzfdV4iEBtTV/2l3zmLKJlt3jnOHLczP24CB
+DTQKp3rKshbRefzot9Y+wnaK692RsYgsyo9YEP0GyWKG9topCHk13r46J6vGLeuj
+ryUKqmbLJkzbJbIcEqwTDo5iHaCVqaMr5Hrb8BdMucSseqZQJsXSd+9tdRcIblUQ
+38kZjmFMm4SFbruJcpZCNM2wNSZPIRX+3eiwNwIDAQABAoIBAHSacOBSJsr+jIi5
+KUOTh9IPtzswVUiDKwARCjB9Sf8p4lKR4N1L/n9kNJyQhApeikgGT2GCMftmqgoo
+tlculQoHFgemBlOmak0MV8NNzF5YKEy/GzF0CDH7gJfEpoyetVFrdA+2QS5yD6U9
+XqKQxiBi2VEqdScmyyeT8AwzNYTnPeH/DOEcnbdRjqiy/CD79F49CQ1lX1Fuqm0K
+I7BivBH1xo/rVnUP4F+IzocDqoga+Pjdj0LTXIgJlHQDSbhsQqWujWQDDuKb+MAw
+sNK4Zf8ErV3j1PyA7f/M5LLq6zgstkW4qikDHo4SpZX8kFOO8tjqb7kujj7XqeaB
+CxqrOTECgYEA73uWkrohcmDJ4KqbuL3tbExSCOUiaIV+sT1eGPNi7GCmXD4eW5Z4
+75v2IHymW83lORSu/DrQ6sKr1nkuRpqr2iBzRmQpl/H+wahIhBXlnJ25uUjDsuPO
+1Pq2LcmyD+jTxVnmbSe/q7O09gZQw3I6H4+BMHmpbf8tC97lqimzpJ0CgYEA1K0W
+ZL70Xtn9quyHvbtae/BW07NZnxvUg4UaVIAL9Zu34JyplJzyzbIjrmlDbv6aRogH
+/KtuG9tfbf55K/jjqNORiuRtzt1hUN1ye4dyW7tHx2/7lXdlqtyK40rQl8P0kqf8
+zaS6BqjnobgSdSpg32rWoL/pcBHPdJCJEgQ8zeMCgYEA0/PK8TOhNIzrP1dgGSKn
+hkkJ9etuB5nW5mEM7gJDFDf6JPupfJ/xiwe6z0fjKK9S57EhqgUYMB55XYnE5iIw
+ZQ6BV9SAZ4V7VsRs4dJLdNC3tn/rDGHJBgCaym2PlbsX6rvFT+h1IC8dwv0V79Ui
+Ehq9WTzkMoE8yhvNokvkPZUCgYEAgBAFxv5xGdh79ftdtXLmhnDvZ6S8l6Fjcxqo
+Ay/jg66Tp43OU226iv/0mmZKM8Dd1xC8dnon4GBVc19jSYYiWBulrRPlx0Xo/o+K
+CzZBN1lrXH1i6dqufpc0jq8TMf/N+q1q/c1uMupsKCY1/xVYpc+ok71b7J7c49zQ
+nOeuUW8CgYA9Infooy65FTgbzca0c9kbCUBmcAPQ2ItH3JcPKWPQTDuV62HcT00o
+fZdIV47Nez1W5Clk191RMy8TXuqI54kocciUWpThc6j44hz49oUueb8U4bLcEHzA
+WxtWBWHwxfSmqgTXilEA3ALJp0kNolLnEttnhENwJpZHlqtes0ZA4w==
+-----END RSA PRIVATE KEY-----"""
+
+
+@skipIf(not has_paramiko, "paramiko is not installed")
+class ParamikoSSHVendorTests(TestCase):
+    """Exercise ParamikoSSHVendor against an in-process paramiko server."""
+
+    def setUp(self):
+        import paramiko.transport
+
+        # re-enable server functionality for tests
+        if hasattr(paramiko.transport, "SERVER_DISABLED_BY_GENTOO"):
+            paramiko.transport.SERVER_DISABLED_BY_GENTOO = False
+
+        self.commands = []
+        socket.setdefaulttimeout(10)
+        self.addCleanup(socket.setdefaulttimeout, None)
+        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        # Bind to an ephemeral port so concurrent test runs don't collide.
+        self.socket.bind(("127.0.0.1", 0))
+        self.socket.listen(5)
+        self.addCleanup(self.socket.close)
+        self.port = self.socket.getsockname()[1]
+        self.thread = threading.Thread(target=self._run)
+        self.thread.start()
+
+    def tearDown(self):
+        self.thread.join()
+
+    def _run(self):
+        # Server side: accept one connection and start an SSH transport on it.
+        try:
+            conn, addr = self.socket.accept()
+        except OSError:
+            return False
+        self.transport = paramiko.Transport(conn)
+        self.addCleanup(self.transport.close)
+        host_key = paramiko.RSAKey.from_private_key(StringIO(SERVER_KEY))
+        self.transport.add_server_key(host_key)
+        server = Server(self.commands)
+        self.transport.start_server(server=server)
+
+    def test_run_command_password(self):
+        vendor = ParamikoSSHVendor(
+            allow_agent=False,
+            look_for_keys=False,
+        )
+        vendor.run_command(
+            "127.0.0.1",
+            "test_run_command_password",
+            username=USER,
+            port=self.port,
+            password=PASSWORD,
+        )
+
+        self.assertIn(b"test_run_command_password", self.commands)
+
+    def test_run_command_with_privkey(self):
+        key = paramiko.RSAKey.from_private_key(StringIO(CLIENT_KEY))
+
+        vendor = ParamikoSSHVendor(
+            allow_agent=False,
+            look_for_keys=False,
+        )
+        vendor.run_command(
+            "127.0.0.1",
+            "test_run_command_with_privkey",
+            username=USER,
+            port=self.port,
+            pkey=key,
+        )
+
+        self.assertIn(b"test_run_command_with_privkey", self.commands)
+
+    def test_run_command_data_transfer(self):
+        vendor = ParamikoSSHVendor(
+            allow_agent=False,
+            look_for_keys=False,
+        )
+        con = vendor.run_command(
+            "127.0.0.1",
+            "test_run_command_data_transfer",
+            username=USER,
+            port=self.port,
+            password=PASSWORD,
+        )
+
+        self.assertIn(b"test_run_command_data_transfer", self.commands)
+
+        channel = self.transport.accept(5)
+        channel.send(b"stdout\n")
+        channel.send_stderr(b"stderr\n")
+        channel.close()
+
+        # FIXME: this returns False
+        # self.assertTrue(con.can_read())
+
+        self.assertEqual(b"stdout\n", con.read(4096))
+
+        # FIXME: this returns an empty string
+        # self.assertEqual(b'stderr\n', con.read_stderr(4096))
blob - /dev/null
blob + 4f6622b40b6cf24486cb3142832ad6d37ab5c914 (mode 644)
--- /dev/null
+++ tests/contrib/test_release_robot.py
+# release_robot.py
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for release_robot."""
+
+import datetime
+import os
+import re
+import shutil
+import tempfile
+import time
+import unittest
+from typing import ClassVar, Dict, List, Optional, Tuple
+
+from dulwich.contrib import release_robot
+from dulwich.repo import Repo
+
+from ..utils import make_commit, make_tag
+
+BASEDIR = os.path.abspath(os.path.dirname(__file__)) # this directory
+
+
+def gmtime_to_datetime(gmt):
+    # Convert seconds-since-epoch to a naive UTC datetime (second precision;
+    # uses the first six time.struct_time fields).
+    return datetime.datetime(*time.gmtime(gmt)[:6])
+
+
+class TagPatternTests(unittest.TestCase):
+    """test tag patterns."""
+
+    def test_tag_pattern(self):
+        """Test tag patterns."""
+        # Mapping of raw tag string -> version expected from group(1) of
+        # release_robot.PATTERN.
+        test_cases = {
+            "0.3": "0.3",
+            "v0.3": "0.3",
+            "release0.3": "0.3",
+            "Release-0.3": "0.3",
+            "v0.3rc1": "0.3rc1",
+            "v0.3-rc1": "0.3-rc1",
+            "v0.3-rc.1": "0.3-rc.1",
+            "version 0.3": "0.3",
+            "version_0.3_rc_1": "0.3_rc_1",
+            "v1": "1",
+            "0.3rc1": "0.3rc1",
+        }
+        for testcase, version in test_cases.items():
+            matches = re.match(release_robot.PATTERN, testcase)
+            self.assertEqual(matches.group(1), version)
+
+
+class GetRecentTagsTest(unittest.TestCase):
+    """test get recent tags."""
+
+    # Git repo for dulwich project
+    # NOTE(review): this attribute appears unused in the visible code.
+    test_repo = os.path.join(BASEDIR, "dulwich_test_repo.zip")
+    committer = b"Mark Mikofski <mark.mikofski@sunpowercorp.com>"
+    test_tags: ClassVar[List[bytes]] = [b"v0.1a", b"v0.1"]
+    # tag name -> (commit time, commit id,
+    #              (tag time, tag id) or None for an unannotated tag)
+    tag_test_data: ClassVar[
+        Dict[bytes, Tuple[int, bytes, Optional[Tuple[int, bytes]]]]
+    ] = {
+        test_tags[0]: (1484788003, b"3" * 40, None),
+        test_tags[1]: (1484788314, b"1" * 40, (1484788401, b"2" * 40)),
+    }
+
+    @classmethod
+    def setUpClass(cls):
+        cls.projdir = tempfile.mkdtemp()  # temporary project directory
+        cls.repo = Repo.init(cls.projdir)  # test repo
+        obj_store = cls.repo.object_store  # test repo object store
+        # commit 1 ('2017-01-19T01:06:43')
+        cls.c1 = make_commit(
+            id=cls.tag_test_data[cls.test_tags[0]][1],
+            commit_time=cls.tag_test_data[cls.test_tags[0]][0],
+            message=b"unannotated tag",
+            author=cls.committer,
+        )
+        obj_store.add_object(cls.c1)
+        # tag 1: unannotated
+        cls.t1 = cls.test_tags[0]
+        cls.repo[b"refs/tags/" + cls.t1] = cls.c1.id  # add unannotated tag
+        # commit 2 ('2017-01-19T01:11:54')
+        cls.c2 = make_commit(
+            id=cls.tag_test_data[cls.test_tags[1]][1],
+            commit_time=cls.tag_test_data[cls.test_tags[1]][0],
+            message=b"annotated tag",
+            parents=[cls.c1.id],
+            author=cls.committer,
+        )
+        obj_store.add_object(cls.c2)
+        # tag 2: annotated ('2017-01-19T01:13:21')
+        cls.t2 = make_tag(
+            cls.c2,
+            id=cls.tag_test_data[cls.test_tags[1]][2][1],
+            name=cls.test_tags[1],
+            tag_time=cls.tag_test_data[cls.test_tags[1]][2][0],
+        )
+        obj_store.add_object(cls.t2)
+        cls.repo[b"refs/heads/master"] = cls.c2.id
+        cls.repo[b"refs/tags/" + cls.t2.name] = cls.t2.id  # add annotated tag
+
+    @classmethod
+    def tearDownClass(cls):
+        cls.repo.close()
+        shutil.rmtree(cls.projdir)
+
+    def test_get_recent_tags(self):
+        """Test get recent tags."""
+        tags = release_robot.get_recent_tags(self.projdir)  # get test tags
+        for tag, metadata in tags:
+            tag = tag.encode("utf-8")
+            test_data = self.tag_test_data[tag]  # test data tag
+            # test commit date, id and author name
+            self.assertEqual(metadata[0], gmtime_to_datetime(test_data[0]))
+            self.assertEqual(metadata[1].encode("utf-8"), test_data[1])
+            self.assertEqual(metadata[2].encode("utf-8"), self.committer)
+            # skip unannotated tags
+            tag_obj = test_data[2]
+            if not tag_obj:
+                continue
+            # tag date, id and name
+            self.assertEqual(metadata[3][0], gmtime_to_datetime(tag_obj[0]))
+            self.assertEqual(metadata[3][1].encode("utf-8"), tag_obj[1])
+            self.assertEqual(metadata[3][2].encode("utf-8"), tag)
blob - /dev/null
blob + 8608cecd13c7feb2ac07fc5454288c79ae9f1d56 (mode 644)
--- /dev/null
+++ tests/contrib/test_swift.py
+# test_swift.py -- Unittests for the Swift backend.
+# Copyright (C) 2013 eNovance SAS <licensing@enovance.com>
+#
+# Author: Fabien Boucher <fabien.boucher@enovance.com>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for dulwich.contrib.swift."""
+
+import json
+import posixpath
+from io import BytesIO, StringIO
+from time import time
+from unittest import skipIf
+
+from dulwich.objects import Blob, Commit, Tag, Tree, parse_timezone
+
+from .. import TestCase
+from ..test_object_store import ObjectStoreTests
+
+missing_libs = []
+
+try:
+ import gevent # noqa: F401
+except ModuleNotFoundError:
+ missing_libs.append("gevent")
+
+try:
+ import geventhttpclient # noqa: F401
+except ModuleNotFoundError:
+ missing_libs.append("geventhttpclient")
+
+try:
+ from unittest.mock import patch
+except ModuleNotFoundError:
+ missing_libs.append("mock")
+
+skipmsg = "Required libraries are not installed (%r)" % missing_libs
+
+
+if not missing_libs:
+ from dulwich.contrib import swift
+
+config_file = """[swift]
+auth_url = http://127.0.0.1:8080/auth/%(version_str)s
+auth_ver = %(version_int)s
+username = test;tester
+password = testing
+region_name = %(region_name)s
+endpoint_type = %(endpoint_type)s
+concurrency = %(concurrency)s
+chunk_length = %(chunk_length)s
+cache_length = %(cache_length)s
+http_pool_length = %(http_pool_length)s
+http_timeout = %(http_timeout)s
+"""
+
+def_config_file = {
+ "version_str": "v1.0",
+ "version_int": 1,
+ "concurrency": 1,
+ "chunk_length": 12228,
+ "cache_length": 1,
+ "region_name": "test",
+ "endpoint_type": "internalURL",
+ "http_pool_length": 1,
+ "http_timeout": 1,
+}
+
+
+def create_swift_connector(store={}):
+    # Factory for a SwiftConnector-compatible callable bound to `store`.
+    # NOTE(review): the mutable default dict is shared by all calls that
+    # omit `store` -- TODO confirm that is intended.
+    return lambda root, conf: FakeSwiftConnector(root, conf=conf, store=store)
+
+
+class Response:
+    """Minimal stand-in for a geventhttpclient HTTP response."""
+
+    def __init__(self, headers={}, status=200, content=None) -> None:
+        self.headers = headers
+        self.status_code = status
+        self.content = content
+
+    def __getitem__(self, key):
+        # Header lookup, matching the real response's mapping interface.
+        return self.headers[key]
+
+    def items(self):
+        return self.headers.items()
+
+    def read(self):
+        return self.content
+
+
+def fake_auth_request_v1(*args, **kwargs):
+    # Successful Swift v1 auth: storage URL and token come back as headers.
+    ret = Response(
+        {
+            "X-Storage-Url": "http://127.0.0.1:8080/v1.0/AUTH_fakeuser",
+            "X-Auth-Token": "12" * 10,
+        },
+        200,
+    )
+    return ret
+
+
+def fake_auth_request_v1_error(*args, **kwargs):
+    # Failed v1 auth (401 Unauthorized).
+    ret = Response({}, 401)
+    return ret
+
+
+def fake_auth_request_v2(*args, **kwargs):
+    # Successful Keystone v2 auth: token and endpoint are returned in a
+    # JSON service catalog body rather than in headers.
+    s_url = "http://127.0.0.1:8080/v1.0/AUTH_fakeuser"
+    resp = {
+        "access": {
+            "token": {"id": "12" * 10},
+            "serviceCatalog": [
+                {
+                    "type": "object-store",
+                    "endpoints": [
+                        {
+                            "region": "test",
+                            "internalURL": s_url,
+                        },
+                    ],
+                },
+            ],
+        }
+    }
+    ret = Response(status=200, content=json.dumps(resp))
+    return ret
+
+
+def create_commit(data, marker=b"Default", blob=None):
+    # Build one blob/tree/commit plus an annotated tag pointing at the
+    # commit; the last Commit in `data` (if any) becomes the parent.
+    if not blob:
+        blob = Blob.from_string(b"The blob content " + marker)
+    tree = Tree()
+    tree.add(b"thefile_" + marker, 0o100644, blob.id)
+    cmt = Commit()
+    if data:
+        assert isinstance(data[-1], Commit)
+        cmt.parents = [data[-1].id]
+    cmt.tree = tree.id
+    author = b"John Doe " + marker + b" <john@doe.net>"
+    cmt.author = cmt.committer = author
+    tz = parse_timezone(b"-0200")[0]
+    cmt.commit_time = cmt.author_time = int(time())
+    cmt.commit_timezone = cmt.author_timezone = tz
+    cmt.encoding = b"UTF-8"
+    cmt.message = b"The commit message " + marker
+    tag = Tag()
+    tag.tagger = b"john@doe.net"
+    tag.message = b"Annotated tag"
+    tag.tag_timezone = parse_timezone(b"-0200")[0]
+    tag.tag_time = cmt.author_time
+    tag.object = (Commit, cmt.id)
+    tag.name = b"v_" + marker + b"_0.1"
+    return blob, tree, tag, cmt
+
+
+def create_commits(length=1, marker=b"Default"):
+    # Chain `length` commits; returns the flat [blob, tree, tag, commit, ...]
+    # list in creation order.
+    # NOTE(review): f-string formatting of the bytes `marker` yields text
+    # like "b'Default'_0" in the derived marker -- TODO confirm intended.
+    data = []
+    for i in range(length):
+        _marker = (f"{marker}_{i}").encode()
+        blob, tree, tag, cmt = create_commit(data, _marker)
+        data.extend([blob, tree, tag, cmt])
+    return data
+
+
+@skipIf(missing_libs, skipmsg)
+class FakeSwiftConnector:
+    """In-memory stand-in for swift.SwiftConnector backed by a dict."""
+
+    def __init__(self, root, conf, store=None) -> None:
+        if store:
+            self.store = store
+        else:
+            self.store = {}
+        self.conf = conf
+        self.root = root
+        self.concurrency = 1
+        self.chunk_length = 12228
+        self.cache_length = 1
+
+    def put_object(self, name, content):
+        name = posixpath.join(self.root, name)
+        # Accept either bytes or a file-like object.
+        if hasattr(content, "seek"):
+            content.seek(0)
+            content = content.read()
+        self.store[name] = content
+
+    def get_object(self, name, range=None):
+        # `range` is a "start-end" byte-range string; an empty start means
+        # a suffix range of the last `end` bytes.
+        # NOTE(review): the range path returns raw bytes while the
+        # non-range path returns BytesIO -- TODO confirm intended.
+        name = posixpath.join(self.root, name)
+        if not range:
+            try:
+                return BytesIO(self.store[name])
+            except KeyError:
+                return None
+        else:
+            l, r = range.split("-")
+            try:
+                if not l:
+                    r = -int(r)
+                    return self.store[name][r:]
+                else:
+                    return self.store[name][int(l) : int(r)]
+            except KeyError:
+                return None
+
+    def get_container_objects(self):
+        return [{"name": k.replace(self.root + "/", "")} for k in self.store]
+
+    def create_root(self):
+        if self.root in self.store.keys():
+            pass
+        else:
+            self.store[self.root] = ""
+
+    def get_object_stat(self, name):
+        name = posixpath.join(self.root, name)
+        if name not in self.store:
+            return None
+        return {"content-length": len(self.store[name])}
+
+
+@skipIf(missing_libs, skipmsg)
+class TestSwiftRepo(TestCase):
+    """Tests for swift.SwiftRepo using a patched fake Swift connector."""
+
+    def setUp(self):
+        super().setUp()
+        self.conf = swift.load_conf(file=StringIO(config_file % def_config_file))
+
+    def test_init(self):
+        # A store containing objects/pack marks a valid bare repo layout.
+        store = {"fakerepo/objects/pack": ""}
+        with patch(
+            "dulwich.contrib.swift.SwiftConnector",
+            new_callable=create_swift_connector,
+            store=store,
+        ):
+            swift.SwiftRepo("fakerepo", conf=self.conf)
+
+    def test_init_no_data(self):
+        with patch(
+            "dulwich.contrib.swift.SwiftConnector",
+            new_callable=create_swift_connector,
+        ):
+            self.assertRaises(Exception, swift.SwiftRepo, "fakerepo", self.conf)
+
+    def test_init_bad_data(self):
+        store = {"fakerepo/.git/objects/pack": ""}
+        with patch(
+            "dulwich.contrib.swift.SwiftConnector",
+            new_callable=create_swift_connector,
+            store=store,
+        ):
+            self.assertRaises(Exception, swift.SwiftRepo, "fakerepo", self.conf)
+
+    def test_put_named_file(self):
+        store = {"fakerepo/objects/pack": ""}
+        with patch(
+            "dulwich.contrib.swift.SwiftConnector",
+            new_callable=create_swift_connector,
+            store=store,
+        ):
+            repo = swift.SwiftRepo("fakerepo", conf=self.conf)
+            desc = b"Fake repo"
+            repo._put_named_file("description", desc)
+        self.assertEqual(repo.scon.store["fakerepo/description"], desc)
+
+    def test_init_bare(self):
+        fsc = FakeSwiftConnector("fakeroot", conf=self.conf)
+        with patch(
+            "dulwich.contrib.swift.SwiftConnector",
+            new_callable=create_swift_connector,
+            store=fsc.store,
+        ):
+            swift.SwiftRepo.init_bare(fsc, conf=self.conf)
+        self.assertIn("fakeroot/objects/pack", fsc.store)
+        self.assertIn("fakeroot/info/refs", fsc.store)
+        self.assertIn("fakeroot/description", fsc.store)
+
+
+@skipIf(missing_libs, skipmsg)
+class TestSwiftInfoRefsContainer(TestCase):
+    """Tests for swift.SwiftInfoRefsContainer."""
+
+    def setUp(self):
+        super().setUp()
+        # Two refs in the tab-separated "sha\tname" format of info/refs.
+        content = (
+            b"22effb216e3a82f97da599b8885a6cadb488b4c5\trefs/heads/master\n"
+            b"cca703b0e1399008b53a1a236d6b4584737649e4\trefs/heads/dev"
+        )
+        self.store = {"fakerepo/info/refs": content}
+        self.conf = swift.load_conf(file=StringIO(config_file % def_config_file))
+        self.fsc = FakeSwiftConnector("fakerepo", conf=self.conf)
+        self.object_store = {}
+
+    def test_init(self):
+        """info/refs does not exist."""
+        irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store)
+        self.assertEqual(len(irc._refs), 0)
+        self.fsc.store = self.store
+        irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store)
+        self.assertIn(b"refs/heads/dev", irc.allkeys())
+        self.assertIn(b"refs/heads/master", irc.allkeys())
+
+    def test_set_if_equals(self):
+        self.fsc.store = self.store
+        irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store)
+        irc.set_if_equals(
+            b"refs/heads/dev",
+            b"cca703b0e1399008b53a1a236d6b4584737649e4",
+            b"1" * 40,
+        )
+        self.assertEqual(irc[b"refs/heads/dev"], b"1" * 40)
+
+    def test_remove_if_equals(self):
+        self.fsc.store = self.store
+        irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store)
+        irc.remove_if_equals(
+            b"refs/heads/dev", b"cca703b0e1399008b53a1a236d6b4584737649e4"
+        )
+        self.assertNotIn(b"refs/heads/dev", irc.allkeys())
+
+
+@skipIf(missing_libs, skipmsg)
+class TestSwiftConnector(TestCase):
+ """Tests for SwiftConnector; every HTTP request is patched with a stub."""
+
+ def setUp(self):
+ super().setUp()
+ self.conf = swift.load_conf(file=StringIO(config_file % def_config_file))
+ # Fake the auth v1 handshake so the connector can be built offline.
+ with patch("geventhttpclient.HTTPClient.request", fake_auth_request_v1):
+ self.conn = swift.SwiftConnector("fakerepo", conf=self.conf)
+
+ def test_init_connector(self):
+ """Connector attributes reflect the parsed config, for auth v1 and v2."""
+ self.assertEqual(self.conn.auth_ver, "1")
+ self.assertEqual(self.conn.auth_url, "http://127.0.0.1:8080/auth/v1.0")
+ self.assertEqual(self.conn.user, "test:tester")
+ self.assertEqual(self.conn.password, "testing")
+ self.assertEqual(self.conn.root, "fakerepo")
+ self.assertEqual(
+ self.conn.storage_url, "http://127.0.0.1:8080/v1.0/AUTH_fakeuser"
+ )
+ self.assertEqual(self.conn.token, "12" * 10)
+ self.assertEqual(self.conn.http_timeout, 1)
+ self.assertEqual(self.conn.http_pool_length, 1)
+ self.assertEqual(self.conn.concurrency, 1)
+ # Switch to keystone (v2) auth: "test:tester" splits into tenant/user.
+ self.conf.set("swift", "auth_ver", "2")
+ self.conf.set("swift", "auth_url", "http://127.0.0.1:8080/auth/v2.0")
+ with patch("geventhttpclient.HTTPClient.request", fake_auth_request_v2):
+ conn = swift.SwiftConnector("fakerepo", conf=self.conf)
+ self.assertEqual(conn.user, "tester")
+ self.assertEqual(conn.tenant, "test")
+ # A failed authentication must raise SwiftException.
+ self.conf.set("swift", "auth_ver", "1")
+ self.conf.set("swift", "auth_url", "http://127.0.0.1:8080/auth/v1.0")
+ with patch("geventhttpclient.HTTPClient.request", fake_auth_request_v1_error):
+ self.assertRaises(
+ swift.SwiftException,
+ lambda: swift.SwiftConnector("fakerepo", conf=self.conf),
+ )
+
+ def test_root_exists(self):
+ """A successful response means the container exists."""
+ with patch("geventhttpclient.HTTPClient.request", lambda *args: Response()):
+ self.assertEqual(self.conn.test_root_exists(), True)
+
+ def test_root_not_exists(self):
+ # A 404 on the container request maps to None.
+ with patch(
+ "geventhttpclient.HTTPClient.request",
+ lambda *args: Response(status=404),
+ ):
+ self.assertEqual(self.conn.test_root_exists(), None)
+
+ def test_create_root(self):
+ """create_root succeeds (returns None) when the PUT is accepted."""
+ with patch(
+ "dulwich.contrib.swift.SwiftConnector.test_root_exists",
+ lambda *args: None,
+ ):
+ with patch("geventhttpclient.HTTPClient.request", lambda *args: Response()):
+ self.assertEqual(self.conn.create_root(), None)
+
+ def test_create_root_fails(self):
+ """An error status from Swift surfaces as SwiftException."""
+ with patch(
+ "dulwich.contrib.swift.SwiftConnector.test_root_exists",
+ lambda *args: None,
+ ):
+ with patch(
+ "geventhttpclient.HTTPClient.request",
+ lambda *args: Response(status=404),
+ ):
+ self.assertRaises(swift.SwiftException, self.conn.create_root)
+
+ def test_get_container_objects(self):
+ # The container listing is returned as JSON by Swift.
+ with patch(
+ "geventhttpclient.HTTPClient.request",
+ lambda *args: Response(content=json.dumps(({"name": "a"}, {"name": "b"}))),
+ ):
+ self.assertEqual(len(self.conn.get_container_objects()), 2)
+
+ def test_get_container_objects_fails(self):
+ with patch(
+ "geventhttpclient.HTTPClient.request",
+ lambda *args: Response(status=404),
+ ):
+ self.assertEqual(self.conn.get_container_objects(), None)
+
+ def test_get_object_stat(self):
+ # Object metadata is exposed through the response headers.
+ with patch(
+ "geventhttpclient.HTTPClient.request",
+ lambda *args: Response(headers={"content-length": "10"}),
+ ):
+ self.assertEqual(self.conn.get_object_stat("a")["content-length"], "10")
+
+ def test_get_object_stat_fails(self):
+ with patch(
+ "geventhttpclient.HTTPClient.request",
+ lambda *args: Response(status=404),
+ ):
+ self.assertEqual(self.conn.get_object_stat("a"), None)
+
+ def test_put_object(self):
+ with patch(
+ "geventhttpclient.HTTPClient.request",
+ lambda *args, **kwargs: Response(),
+ ):
+ self.assertEqual(self.conn.put_object("a", BytesIO(b"content")), None)
+
+ def test_put_object_fails(self):
+ with patch(
+ "geventhttpclient.HTTPClient.request",
+ lambda *args, **kwargs: Response(status=400),
+ ):
+ self.assertRaises(
+ swift.SwiftException,
+ lambda: self.conn.put_object("a", BytesIO(b"content")),
+ )
+
+ def test_get_object(self):
+ # Full read returns a file-like object; a ranged read returns raw bytes.
+ with patch(
+ "geventhttpclient.HTTPClient.request",
+ lambda *args, **kwargs: Response(content=b"content"),
+ ):
+ self.assertEqual(self.conn.get_object("a").read(), b"content")
+ with patch(
+ "geventhttpclient.HTTPClient.request",
+ lambda *args, **kwargs: Response(content=b"content"),
+ ):
+ self.assertEqual(self.conn.get_object("a", range="0-6"), b"content")
+
+ def test_get_object_fails(self):
+ with patch(
+ "geventhttpclient.HTTPClient.request",
+ lambda *args, **kwargs: Response(status=404),
+ ):
+ self.assertEqual(self.conn.get_object("a"), None)
+
+ def test_del_object(self):
+ with patch("geventhttpclient.HTTPClient.request", lambda *args: Response()):
+ self.assertEqual(self.conn.del_object("a"), None)
+
+ def test_del_root(self):
+ """del_root deletes every listed object and then the container itself."""
+ with patch(
+ "dulwich.contrib.swift.SwiftConnector.del_object",
+ lambda *args: None,
+ ):
+ # Note: the two adjacent string literals below are concatenated
+ # into a single patch target path.
+ with patch(
+ "dulwich.contrib.swift.SwiftConnector." "get_container_objects",
+ lambda *args: ({"name": "a"}, {"name": "b"}),
+ ):
+ with patch(
+ "geventhttpclient.HTTPClient.request",
+ lambda *args: Response(),
+ ):
+ self.assertEqual(self.conn.del_root(), None)
+
+
+@skipIf(missing_libs, skipmsg)
+class SwiftObjectStoreTests(ObjectStoreTests, TestCase):
+ """Run the generic object-store test suite against SwiftObjectStore."""
+
+ def setUp(self):
+ # Call TestCase.setUp directly: ObjectStoreTests is a mixin that only
+ # provides test methods and expects self.store to be prepared here.
+ TestCase.setUp(self)
+ conf = swift.load_conf(file=StringIO(config_file % def_config_file))
+ fsc = FakeSwiftConnector("fakerepo", conf=conf)
+ self.store = swift.SwiftObjectStore(fsc)
blob - /dev/null
blob + 2a19f6df2b680aeb87dcd5e56cdd3e03e22b9942 (mode 644)
--- /dev/null
+++ tests/contrib/test_swift_smoke.py
+# test_smoke.py -- Functional tests for the Swift backend.
+# Copyright (C) 2013 eNovance SAS <licensing@enovance.com>
+#
+# Author: Fabien Boucher <fabien.boucher@enovance.com>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Start functional tests.
+
+A Swift installation must be available before
+starting those tests. The account and authentication method used
+during this functional tests must be changed in the configuration file
+passed as environment variable.
+The container used to create a fake repository is defined
+in cls.fakerepo and will be deleted after the tests.
+
+DULWICH_SWIFT_CFG=/tmp/conf.cfg PYTHONPATH=. python -m unittest \
+ tests.contrib.test_swift_smoke
+"""
+
+import os
+import shutil
+import tempfile
+import unittest
+
+import gevent
+from gevent import monkey
+
+# Patch the stdlib before importing dulwich so that the network I/O done by
+# these functional tests is gevent-cooperative (hence the noqa: E402 below).
+monkey.patch_all()
+
+from dulwich import client, index, objects, repo, server # noqa: E402
+from dulwich.contrib import swift # noqa: E402
+
+
+class DulwichServer:
+ """Start the TCPGitServer with Swift backend."""
+
+ def __init__(self, backend, port) -> None:
+ self.port = port
+ self.backend = backend
+
+ def run(self):
+ """Start serving in a background greenlet (serve_forever blocks)."""
+ self.server = server.TCPGitServer(self.backend, "localhost", port=self.port)
+ self.job = gevent.spawn(self.server.serve_forever)
+
+ def stop(self):
+ """Shut the server down and wait for the greenlet to finish."""
+ self.server.shutdown()
+ gevent.joinall((self.job,))
+
+
+class SwiftSystemBackend(server.Backend):
+ """Server backend that opens repositories stored in Swift."""
+
+ def open_repository(self, path):
+ # swift.load_conf() reads the configuration from the environment
+ # (see the module docstring: DULWICH_SWIFT_CFG).
+ return swift.SwiftRepo(path, conf=swift.load_conf())
+
+
+class SwiftRepoSmokeTest(unittest.TestCase):
+ """Functional push/fetch tests against a Swift-backed TCP git server."""
+
+ @classmethod
+ def setUpClass(cls):
+ # One server instance (Swift backend) is shared by every test.
+ cls.backend = SwiftSystemBackend()
+ cls.port = 9148
+ cls.server_address = "localhost"
+ cls.fakerepo = "fakerepo"
+ cls.th_server = DulwichServer(cls.backend, cls.port)
+ cls.th_server.run()
+ cls.conf = swift.load_conf()
+
+ @classmethod
+ def tearDownClass(cls):
+ cls.th_server.stop()
+
+ def setUp(self):
+ # Ensure the Swift container is absent and that temp_d does not exist
+ # yet; each test creates its own local repository under temp_d.
+ self.scon = swift.SwiftConnector(self.fakerepo, self.conf)
+ if self.scon.test_root_exists():
+ try:
+ self.scon.del_root()
+ except swift.SwiftException:
+ pass
+ self.temp_d = tempfile.mkdtemp()
+ if os.path.isdir(self.temp_d):
+ shutil.rmtree(self.temp_d)
+
+ def tearDown(self):
+ # Best-effort cleanup of both the Swift container and the temp dir.
+ if self.scon.test_root_exists():
+ try:
+ self.scon.del_root()
+ except swift.SwiftException:
+ pass
+ if os.path.isdir(self.temp_d):
+ shutil.rmtree(self.temp_d)
+
+ def test_init_bare(self):
+ """init_bare creates the container with info/refs and objects/pack."""
+ swift.SwiftRepo.init_bare(self.scon, self.conf)
+ self.assertTrue(self.scon.test_root_exists())
+ obj = self.scon.get_container_objects()
+ filtered = [
+ o for o in obj if o["name"] == "info/refs" or o["name"] == "objects/pack"
+ ]
+ self.assertEqual(len(filtered), 2)
+
+ def test_clone_bare(self):
+ """Fetching from a freshly initialized (empty) repo yields no refs."""
+ local_repo = repo.Repo.init(self.temp_d, mkdir=True)
+ swift.SwiftRepo.init_bare(self.scon, self.conf)
+ tcp_client = client.TCPGitClient(self.server_address, port=self.port)
+ remote_refs = tcp_client.fetch(self.fakerepo, local_repo)
+ # The remote repo is empty (no refs retrieved)
+ self.assertEqual(remote_refs, None)
+
+ def test_push_commit(self):
+ """A pushed commit is readable back from the Swift-backed repo."""
+
+ def determine_wants(*args, **kwargs):
+ return {"refs/heads/master": local_repo.refs["HEAD"]}
+
+ local_repo = repo.Repo.init(self.temp_d, mkdir=True)
+ # Nothing in the staging area
+ local_repo.do_commit("Test commit", "fbo@localhost")
+ sha = local_repo.refs.read_loose_ref("refs/heads/master")
+ swift.SwiftRepo.init_bare(self.scon, self.conf)
+ tcp_client = client.TCPGitClient(self.server_address, port=self.port)
+ tcp_client.send_pack(
+ self.fakerepo, determine_wants, local_repo.generate_pack_data
+ )
+ swift_repo = swift.SwiftRepo("fakerepo", self.conf)
+ remote_sha = swift_repo.refs.read_loose_ref("refs/heads/master")
+ self.assertEqual(sha, remote_sha)
+
+ def test_push_branch(self):
+ """Pushing a non-master branch updates that ref remotely."""
+
+ def determine_wants(*args, **kwargs):
+ return {"refs/heads/mybranch": local_repo.refs["refs/heads/mybranch"]}
+
+ local_repo = repo.Repo.init(self.temp_d, mkdir=True)
+ # Nothing in the staging area
+ local_repo.do_commit("Test commit", "fbo@localhost", ref="refs/heads/mybranch")
+ sha = local_repo.refs.read_loose_ref("refs/heads/mybranch")
+ swift.SwiftRepo.init_bare(self.scon, self.conf)
+ tcp_client = client.TCPGitClient(self.server_address, port=self.port)
+ tcp_client.send_pack(
+ "/fakerepo", determine_wants, local_repo.generate_pack_data
+ )
+ swift_repo = swift.SwiftRepo(self.fakerepo, self.conf)
+ remote_sha = swift_repo.refs.read_loose_ref("refs/heads/mybranch")
+ self.assertEqual(sha, remote_sha)
+
+ def test_push_multiple_branch(self):
+ """Pushing several branches at once updates all of them remotely."""
+
+ def determine_wants(*args, **kwargs):
+ return {
+ "refs/heads/mybranch": local_repo.refs["refs/heads/mybranch"],
+ "refs/heads/master": local_repo.refs["refs/heads/master"],
+ "refs/heads/pullr-108": local_repo.refs["refs/heads/pullr-108"],
+ }
+
+ local_repo = repo.Repo.init(self.temp_d, mkdir=True)
+ # Nothing in the staging area
+ local_shas = {}
+ remote_shas = {}
+ for branch in ("master", "mybranch", "pullr-108"):
+ local_shas[branch] = local_repo.do_commit(
+ "Test commit %s" % branch,
+ "fbo@localhost",
+ ref="refs/heads/%s" % branch,
+ )
+ swift.SwiftRepo.init_bare(self.scon, self.conf)
+ tcp_client = client.TCPGitClient(self.server_address, port=self.port)
+ tcp_client.send_pack(
+ self.fakerepo, determine_wants, local_repo.generate_pack_data
+ )
+ swift_repo = swift.SwiftRepo("fakerepo", self.conf)
+ for branch in ("master", "mybranch", "pullr-108"):
+ remote_shas[branch] = swift_repo.refs.read_loose_ref(
+ "refs/heads/%s" % branch
+ )
+ self.assertDictEqual(local_shas, remote_shas)
+
+ def test_push_data_branch(self):
+ """Pushed file contents can be read back object-by-object."""
+
+ def determine_wants(*args, **kwargs):
+ return {"refs/heads/master": local_repo.refs["HEAD"]}
+
+ local_repo = repo.Repo.init(self.temp_d, mkdir=True)
+ os.mkdir(os.path.join(self.temp_d, "dir"))
+ files = ("testfile", "testfile2", "dir/testfile3")
+ i = 0
+ for f in files:
+ open(os.path.join(self.temp_d, f), "w").write("DATA %s" % i)
+ i += 1
+ local_repo.stage(files)
+ local_repo.do_commit("Test commit", "fbo@localhost", ref="refs/heads/master")
+ swift.SwiftRepo.init_bare(self.scon, self.conf)
+ tcp_client = client.TCPGitClient(self.server_address, port=self.port)
+ tcp_client.send_pack(
+ self.fakerepo, determine_wants, local_repo.generate_pack_data
+ )
+ # Walk commit -> tree -> entries on the remote side.
+ swift_repo = swift.SwiftRepo("fakerepo", self.conf)
+ commit_sha = swift_repo.refs.read_loose_ref("refs/heads/master")
+ otype, data = swift_repo.object_store.get_raw(commit_sha)
+ commit = objects.ShaFile.from_raw_string(otype, data)
+ otype, data = swift_repo.object_store.get_raw(commit._tree)
+ tree = objects.ShaFile.from_raw_string(otype, data)
+ objs = tree.items()
+ objs_ = []
+ for tree_entry in objs:
+ objs_.append(swift_repo.object_store.get_raw(tree_entry.sha))
+ # Blob
+ # NOTE(review): get_raw() data is raw object bytes; comparing against
+ # the str "DATA 0" looks py2-era -- verify this passes on python3.
+ self.assertEqual(objs_[1][1], "DATA 0")
+ self.assertEqual(objs_[2][1], "DATA 1")
+ # Tree
+ self.assertEqual(objs_[0][0], 2)
+
+ def test_clone_then_push_data(self):
+ """Clone the data pushed by the previous scenario, then push on top."""
+ self.test_push_data_branch()
+ shutil.rmtree(self.temp_d)
+ local_repo = repo.Repo.init(self.temp_d, mkdir=True)
+ tcp_client = client.TCPGitClient(self.server_address, port=self.port)
+ remote_refs = tcp_client.fetch(self.fakerepo, local_repo)
+ files = (
+ os.path.join(self.temp_d, "testfile"),
+ os.path.join(self.temp_d, "testfile2"),
+ )
+ local_repo["HEAD"] = remote_refs["refs/heads/master"]
+ indexfile = local_repo.index_path()
+ tree = local_repo["HEAD"].tree
+ # Materialize the fetched tree in the working directory.
+ index.build_index_from_tree(
+ local_repo.path, indexfile, local_repo.object_store, tree
+ )
+ for f in files:
+ self.assertEqual(os.path.isfile(f), True)
+
+ def determine_wants(*args, **kwargs):
+ return {"refs/heads/master": local_repo.refs["HEAD"]}
+
+ os.mkdir(os.path.join(self.temp_d, "test"))
+ files = ("testfile11", "testfile22", "test/testfile33")
+ i = 0
+ for f in files:
+ open(os.path.join(self.temp_d, f), "w").write("DATA %s" % i)
+ i += 1
+ local_repo.stage(files)
+ local_repo.do_commit("Test commit", "fbo@localhost", ref="refs/heads/master")
+ tcp_client.send_pack(
+ "/fakerepo", determine_wants, local_repo.generate_pack_data
+ )
+
+ def test_push_remove_branch(self):
+ """Pushing ZERO_SHA for a ref deletes that branch remotely."""
+
+ def determine_wants(*args, **kwargs):
+ return {
+ "refs/heads/pullr-108": objects.ZERO_SHA,
+ "refs/heads/master": local_repo.refs["refs/heads/master"],
+ "refs/heads/mybranch": local_repo.refs["refs/heads/mybranch"],
+ }
+
+ self.test_push_multiple_branch()
+ local_repo = repo.Repo(self.temp_d)
+ tcp_client = client.TCPGitClient(self.server_address, port=self.port)
+ tcp_client.send_pack(
+ self.fakerepo, determine_wants, local_repo.generate_pack_data
+ )
+ swift_repo = swift.SwiftRepo("fakerepo", self.conf)
+ self.assertNotIn("refs/heads/pullr-108", swift_repo.refs.allkeys())
+
+ def test_push_annotated_tag(self):
+ """An annotated tag object survives the push and points at the commit."""
+
+ def determine_wants(*args, **kwargs):
+ return {
+ "refs/heads/master": local_repo.refs["HEAD"],
+ "refs/tags/v1.0": local_repo.refs["refs/tags/v1.0"],
+ }
+
+ local_repo = repo.Repo.init(self.temp_d, mkdir=True)
+ # Nothing in the staging area
+ sha = local_repo.do_commit("Test commit", "fbo@localhost")
+ otype, data = local_repo.object_store.get_raw(sha)
+ commit = objects.ShaFile.from_raw_string(otype, data)
+ tag = objects.Tag()
+ tag.tagger = "fbo@localhost"
+ tag.message = "Annotated tag"
+ tag.tag_timezone = objects.parse_timezone("-0200")[0]
+ tag.tag_time = commit.author_time
+ tag.object = (objects.Commit, commit.id)
+ # NOTE(review): tag.name is "v0.1" but the ref pushed is
+ # refs/tags/v1.0 -- verify the mismatch is intentional.
+ tag.name = "v0.1"
+ local_repo.object_store.add_object(tag)
+ local_repo.refs["refs/tags/v1.0"] = tag.id
+ swift.SwiftRepo.init_bare(self.scon, self.conf)
+ tcp_client = client.TCPGitClient(self.server_address, port=self.port)
+ tcp_client.send_pack(
+ self.fakerepo, determine_wants, local_repo.generate_pack_data
+ )
+ swift_repo = swift.SwiftRepo(self.fakerepo, self.conf)
+ tag_sha = swift_repo.refs.read_loose_ref("refs/tags/v1.0")
+ otype, data = swift_repo.object_store.get_raw(tag_sha)
+ rtag = objects.ShaFile.from_raw_string(otype, data)
+ self.assertEqual(rtag.object[1], commit.id)
+ self.assertEqual(rtag.id, tag.id)
+
+
+# Allow running this functional test module directly.
+if __name__ == "__main__":
+ unittest.main()
blob - /dev/null
blob + a0a0508865cbb93ca48f9cceafe74f23b4e45921 (mode 644)
--- /dev/null
+++ tests/test_archive.py
+# test_archive.py -- tests for archive
+# Copyright (C) 2015 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for archive support."""
+
+import struct
+import tarfile
+from io import BytesIO
+from unittest import skipUnless
+
+from dulwich.archive import tar_stream
+from dulwich.object_store import MemoryObjectStore
+from dulwich.objects import Blob, Tree
+
+from . import TestCase
+from .utils import build_commit_graph
+
+# mock is optional: tests that need patch() are skipped when it is absent
+# (see the skipUnless guard on test_same_file below).
+try:
+ from unittest.mock import patch
+except ImportError:
+ patch = None # type: ignore
+
+
+class ArchiveTests(TestCase):
+ """Tests for tar_stream archive generation."""
+
+ def test_empty(self):
+ """An empty tree produces a tar archive with no members."""
+ store = MemoryObjectStore()
+ c1, c2, c3 = build_commit_graph(store, [[1], [2, 1], [3, 1, 2]])
+ tree = store[c3.tree]
+ stream = b"".join(tar_stream(store, tree, 10))
+ out = BytesIO(stream)
+ tf = tarfile.TarFile(fileobj=out)
+ self.addCleanup(tf.close)
+ self.assertEqual([], tf.getnames())
+
+ def _get_example_tar_stream(self, *tar_stream_args, **tar_stream_kwargs):
+ """Build a one-file tree and return its tar stream as a BytesIO."""
+ store = MemoryObjectStore()
+ b1 = Blob.from_string(b"somedata")
+ store.add_object(b1)
+ t1 = Tree()
+ t1.add(b"somename", 0o100644, b1.id)
+ store.add_object(t1)
+ stream = b"".join(tar_stream(store, t1, *tar_stream_args, **tar_stream_kwargs))
+ return BytesIO(stream)
+
+ def test_simple(self):
+ stream = self._get_example_tar_stream(mtime=0)
+ tf = tarfile.TarFile(fileobj=stream)
+ self.addCleanup(tf.close)
+ self.assertEqual(["somename"], tf.getnames())
+
+ def test_unicode(self):
+ # Non-ASCII file names must round-trip through the tar stream.
+ store = MemoryObjectStore()
+ b1 = Blob.from_string(b"somedata")
+ store.add_object(b1)
+ t1 = Tree()
+ t1.add("ő".encode(), 0o100644, b1.id)
+ store.add_object(t1)
+ stream = b"".join(tar_stream(store, t1, mtime=0))
+ tf = tarfile.TarFile(fileobj=BytesIO(stream))
+ self.addCleanup(tf.close)
+ self.assertEqual(["ő"], tf.getnames())
+
+ def test_prefix(self):
+ stream = self._get_example_tar_stream(mtime=0, prefix=b"blah")
+ tf = tarfile.TarFile(fileobj=stream)
+ self.addCleanup(tf.close)
+ self.assertEqual(["blah/somename"], tf.getnames())
+
+ def test_gzip_mtime(self):
+ # The gzip header stores mtime as a little-endian 32-bit int
+ # at byte offsets 4-8.
+ stream = self._get_example_tar_stream(mtime=1234, format="gz")
+ expected_mtime = struct.pack("<L", 1234)
+ self.assertEqual(stream.getvalue()[4:8], expected_mtime)
+
+ @skipUnless(patch, "Required mock.patch")
+ def test_same_file(self):
+ """Archives are reproducible: wall-clock time must not leak in."""
+ contents = [None, None]
+ for format in ["", "gz", "bz2"]:
+ for i in [0, 1]:
+ with patch("time.time", return_value=i):
+ stream = self._get_example_tar_stream(mtime=0, format=format)
+ contents[i] = stream.getvalue()
+ self.assertEqual(
+ contents[0],
+ contents[1],
+ "Different file contents for format %r" % format,
+ )
blob - /dev/null
blob + 34e2e47b1b89accf28d7c4939d956693b4dc486e (mode 644)
--- /dev/null
+++ tests/test_blackbox.py
+# test_blackbox.py -- blackbox tests
+# Copyright (C) 2010 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Blackbox tests for Dulwich commands."""
+
+import shutil
+import tempfile
+
+from dulwich.repo import Repo
+
+from . import BlackboxTestCase
+
+
+class GitReceivePackTests(BlackboxTestCase):
+ """Blackbox tests for dul-receive-pack."""
+
+ def setUp(self):
+ super().setUp()
+ self.path = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, self.path)
+ self.repo = Repo.init(self.path)
+
+ def test_basic(self):
+ """Sending a flush-pkt ("0000") ends the session cleanly."""
+ process = self.run_command("dul-receive-pack", [self.path])
+ (stdout, stderr) = process.communicate(b"0000")
+ self.assertEqual(b"0000", stdout[-4:])
+ self.assertEqual(0, process.returncode)
+
+ def test_missing_arg(self):
+ """Without a <git-dir> argument the command prints usage and exits 1."""
+ process = self.run_command("dul-receive-pack", [])
+ (stdout, stderr) = process.communicate()
+ self.assertEqual(
+ [b"usage: dul-receive-pack <git-dir>"], stderr.splitlines()[-1:]
+ )
+ self.assertEqual(b"", stdout)
+ self.assertEqual(1, process.returncode)
+
+
+class GitUploadPackTests(BlackboxTestCase):
+ """Blackbox tests for dul-upload-pack."""
+
+ def setUp(self):
+ super().setUp()
+ self.path = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, self.path)
+ self.repo = Repo.init(self.path)
+
+ def test_missing_arg(self):
+ """Without a <git-dir> argument the command prints usage and exits 1."""
+ process = self.run_command("dul-upload-pack", [])
+ (stdout, stderr) = process.communicate()
+ self.assertEqual(
+ [b"usage: dul-upload-pack <git-dir>"], stderr.splitlines()[-1:]
+ )
+ self.assertEqual(b"", stdout)
+ self.assertEqual(1, process.returncode)
blob - /dev/null
blob + d60d2227261a33b6f6e7853266e882f68e179df8 (mode 644)
--- /dev/null
+++ tests/test_bundle.py
+# test_bundle.py -- tests for bundle
+# Copyright (C) 2020 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for bundle support."""
+
+import os
+import tempfile
+from io import BytesIO
+
+from dulwich.bundle import Bundle, read_bundle, write_bundle
+from dulwich.pack import PackData, write_pack_objects
+
+from . import TestCase
+
+
+class BundleTests(TestCase):
+ """Tests for reading and writing git bundle files."""
+
+ def test_roundtrip_bundle(self):
+ """A bundle written to disk reads back equal to the original."""
+ origbundle = Bundle()
+ origbundle.version = 3
+ origbundle.capabilities = {"foo": None}
+ origbundle.references = {b"refs/heads/master": b"ab" * 20}
+ origbundle.prerequisites = [(b"cc" * 20, "comment")]
+ b = BytesIO()
+ # An empty pack is sufficient for the roundtrip check.
+ write_pack_objects(b.write, [])
+ b.seek(0)
+ origbundle.pack_data = PackData.from_file(b)
+ with tempfile.TemporaryDirectory() as td:
+ with open(os.path.join(td, "foo"), "wb") as f:
+ write_bundle(f, origbundle)
+
+ with open(os.path.join(td, "foo"), "rb") as f:
+ newbundle = read_bundle(f)
+
+ self.assertEqual(origbundle, newbundle)
blob - /dev/null
blob + 3ada349921cf98083d73749174ad67ec17466072 (mode 644)
--- /dev/null
+++ tests/test_client.py
+# test_client.py -- Tests for the git protocol, client side
+# Copyright (C) 2009 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+import base64
+import os
+import shutil
+import sys
+import tempfile
+import warnings
+from io import BytesIO
+from typing import Dict
+from unittest.mock import patch
+from urllib.parse import quote as urlquote
+from urllib.parse import urlparse
+
+import dulwich
+from dulwich import client
+from dulwich.client import (
+ FetchPackResult,
+ GitProtocolError,
+ HangupException,
+ HttpGitClient,
+ InvalidWants,
+ LocalGitClient,
+ PLinkSSHVendor,
+ ReportStatusParser,
+ SendPackError,
+ SSHGitClient,
+ StrangeHostname,
+ SubprocessSSHVendor,
+ TCPGitClient,
+ TraditionalGitClient,
+ _remote_error_from_stderr,
+ check_wants,
+ default_urllib3_manager,
+ get_credentials_from_store,
+ get_transport_and_path,
+ get_transport_and_path_from_url,
+ parse_rsync_url,
+)
+from dulwich.config import ConfigDict
+from dulwich.objects import Commit, Tree
+from dulwich.pack import pack_objects_to_data, write_pack_data, write_pack_objects
+from dulwich.protocol import TCP_GIT_PORT, Protocol
+from dulwich.repo import MemoryRepo, Repo
+
+from . import TestCase, skipIf
+from .utils import open_repo, setup_warning_catcher, tear_down_repo
+
+
+class DummyClient(TraditionalGitClient):
+ """TraditionalGitClient whose transport is replaced by in-memory streams."""
+
+ def __init__(self, can_read, read, write) -> None:
+ self.can_read = can_read
+ self.read = read
+ self.write = write
+ TraditionalGitClient.__init__(self)
+
+ def _connect(self, service, path):
+ # Protocol wraps the injected read/write callables; no real socket.
+ return Protocol(self.read, self.write), self.can_read, None
+
+
+class DummyPopen:
+ """Minimal subprocess.Popen stand-in with canned pipes and results."""
+
+ def __init__(self, *args, **kwards) -> None:
+ self.stdin = BytesIO(b"stdin")
+ self.stdout = BytesIO(b"stdout")
+ self.stderr = BytesIO(b"stderr")
+ self.returncode = 0
+ self.args = args
+ self.kwargs = kwards
+
+ def communicate(self, *args, **kwards):
+ return ("Running", "")
+
+ def wait(self, *args, **kwards):
+ return False
+
+
+# TODO(durin42): add unit-level tests of GitClient
+class GitClientTests(TestCase):
+ def setUp(self):
+ super().setUp()
+ self.rout = BytesIO()
+ self.rin = BytesIO()
+ self.client = DummyClient(lambda x: True, self.rin.read, self.rout.write)
+
+ def test_caps(self):
+ agent_cap = ("agent=dulwich/%d.%d.%d" % dulwich.__version__).encode("ascii")
+ self.assertEqual(
+ {
+ b"multi_ack",
+ b"side-band-64k",
+ b"ofs-delta",
+ b"thin-pack",
+ b"multi_ack_detailed",
+ b"shallow",
+ agent_cap,
+ },
+ set(self.client._fetch_capabilities),
+ )
+ self.assertEqual(
+ {
+ b"delete-refs",
+ b"ofs-delta",
+ b"report-status",
+ b"side-band-64k",
+ agent_cap,
+ },
+ set(self.client._send_capabilities),
+ )
+
+ def test_archive_ack(self):
+ self.rin.write(b"0009NACK\n" b"0000")
+ self.rin.seek(0)
+ self.client.archive(b"bla", b"HEAD", None, None)
+ self.assertEqual(self.rout.getvalue(), b"0011argument HEAD0000")
+
+ def test_fetch_empty(self):
+ self.rin.write(b"0000")
+ self.rin.seek(0)
+
+ def check_heads(heads, **kwargs):
+ self.assertEqual(heads, {})
+ return []
+
+ ret = self.client.fetch_pack(b"/", check_heads, None, None)
+ self.assertEqual({}, ret.refs)
+ self.assertEqual({}, ret.symrefs)
+
+ def test_fetch_pack_ignores_magic_ref(self):
+ self.rin.write(
+ b"00000000000000000000000000000000000000000000 capabilities^{}"
+ b"\x00 multi_ack "
+ b"thin-pack side-band side-band-64k ofs-delta shallow no-progress "
+ b"include-tag\n"
+ b"0000"
+ )
+ self.rin.seek(0)
+
+ def check_heads(heads, **kwargs):
+ self.assertEqual({}, heads)
+ return []
+
+ ret = self.client.fetch_pack(b"bla", check_heads, None, None, None)
+ self.assertEqual({}, ret.refs)
+ self.assertEqual({}, ret.symrefs)
+ self.assertEqual(self.rout.getvalue(), b"0000")
+
+ def test_fetch_pack_none(self):
+ self.rin.write(
+ b"008855dcc6bf963f922e1ed5c4bbaaefcfacef57b1d7 HEAD\x00multi_ack "
+ b"thin-pack side-band side-band-64k ofs-delta shallow no-progress "
+ b"include-tag\n"
+ b"0000"
+ )
+ self.rin.seek(0)
+ ret = self.client.fetch_pack(
+ b"bla", lambda heads, **kwargs: [], None, None, None
+ )
+ self.assertEqual(
+ {b"HEAD": b"55dcc6bf963f922e1ed5c4bbaaefcfacef57b1d7"}, ret.refs
+ )
+ self.assertEqual({}, ret.symrefs)
+ self.assertEqual(self.rout.getvalue(), b"0000")
+
+ def test_send_pack_no_sideband64k_with_update_ref_error(self) -> None:
+ # No side-bank-64k reported by server shouldn't try to parse
+ # side band data
+ pkts = [
+ b"55dcc6bf963f922e1ed5c4bbaaefcfacef57b1d7 capabilities^{}"
+ b"\x00 report-status delete-refs ofs-delta\n",
+ b"",
+ b"unpack ok",
+ b"ng refs/foo/bar pre-receive hook declined",
+ b"",
+ ]
+ for pkt in pkts:
+ if pkt == b"":
+ self.rin.write(b"0000")
+ else:
+ self.rin.write(("%04x" % (len(pkt) + 4)).encode("ascii") + pkt)
+ self.rin.seek(0)
+
+ tree = Tree()
+ commit = Commit()
+ commit.tree = tree
+ commit.parents = []
+ commit.author = commit.committer = b"test user"
+ commit.commit_time = commit.author_time = 1174773719
+ commit.commit_timezone = commit.author_timezone = 0
+ commit.encoding = b"UTF-8"
+ commit.message = b"test message"
+
+ def update_refs(refs):
+ return {
+ b"refs/foo/bar": commit.id,
+ }
+
+ def generate_pack_data(have, want, ofs_delta=False, progress=None):
+ return pack_objects_to_data(
+ [
+ (commit, None),
+ (tree, b""),
+ ]
+ )
+
+ result = self.client.send_pack("blah", update_refs, generate_pack_data)
+ self.assertEqual(
+ {b"refs/foo/bar": "pre-receive hook declined"}, result.ref_status
+ )
+ self.assertEqual({b"refs/foo/bar": commit.id}, result.refs)
+
+ def test_send_pack_none(self):
+ # Set ref to current value
+ self.rin.write(
+ b"0078310ca9477129b8586fa2afc779c1f57cf64bba6c "
+ b"refs/heads/master\x00 report-status delete-refs "
+ b"side-band-64k quiet ofs-delta\n"
+ b"0000"
+ )
+ self.rin.seek(0)
+
+ def update_refs(refs):
+ return {b"refs/heads/master": b"310ca9477129b8586fa2afc779c1f57cf64bba6c"}
+
+ def generate_pack_data(have, want, ofs_delta=False, progress=None):
+ return 0, []
+
+ self.client.send_pack(b"/", update_refs, generate_pack_data)
+ self.assertEqual(self.rout.getvalue(), b"0000")
+
+ def test_send_pack_keep_and_delete(self):
+ self.rin.write(
+ b"0063310ca9477129b8586fa2afc779c1f57cf64bba6c "
+ b"refs/heads/master\x00report-status delete-refs ofs-delta\n"
+ b"003f310ca9477129b8586fa2afc779c1f57cf64bba6c refs/heads/keepme\n"
+ b"0000000eunpack ok\n"
+ b"0019ok refs/heads/master\n"
+ b"0000"
+ )
+ self.rin.seek(0)
+
+ def update_refs(refs):
+ return {b"refs/heads/master": b"0" * 40}
+
+ def generate_pack_data(have, want, ofs_delta=False, progress=None):
+ return 0, []
+
+ self.client.send_pack(b"/", update_refs, generate_pack_data)
+ self.assertEqual(
+ self.rout.getvalue(),
+ b"008b310ca9477129b8586fa2afc779c1f57cf64bba6c "
+ b"0000000000000000000000000000000000000000 "
+ b"refs/heads/master\x00delete-refs ofs-delta report-status0000",
+ )
+
+ def test_send_pack_delete_only(self):
+ self.rin.write(
+ b"0063310ca9477129b8586fa2afc779c1f57cf64bba6c "
+ b"refs/heads/master\x00report-status delete-refs ofs-delta\n"
+ b"0000000eunpack ok\n"
+ b"0019ok refs/heads/master\n"
+ b"0000"
+ )
+ self.rin.seek(0)
+
+ def update_refs(refs):
+ return {b"refs/heads/master": b"0" * 40}
+
+ def generate_pack_data(have, want, ofs_delta=False, progress=None):
+ return 0, []
+
+ self.client.send_pack(b"/", update_refs, generate_pack_data)
+ self.assertEqual(
+ self.rout.getvalue(),
+ b"008b310ca9477129b8586fa2afc779c1f57cf64bba6c "
+ b"0000000000000000000000000000000000000000 "
+ b"refs/heads/master\x00delete-refs ofs-delta report-status0000",
+ )
+
+ def test_send_pack_new_ref_only(self):
+ self.rin.write(
+ b"0063310ca9477129b8586fa2afc779c1f57cf64bba6c "
+ b"refs/heads/master\x00report-status delete-refs ofs-delta\n"
+ b"0000000eunpack ok\n"
+ b"0019ok refs/heads/blah12\n"
+ b"0000"
+ )
+ self.rin.seek(0)
+
+ def update_refs(refs):
+ return {
+ b"refs/heads/blah12": b"310ca9477129b8586fa2afc779c1f57cf64bba6c",
+ b"refs/heads/master": b"310ca9477129b8586fa2afc779c1f57cf64bba6c",
+ }
+
+ def generate_pack_data(have, want, ofs_delta=False, progress=None):
+ return 0, []
+
+ f = BytesIO()
+ write_pack_objects(f.write, [])
+ self.client.send_pack("/", update_refs, generate_pack_data)
+ self.assertEqual(
+ self.rout.getvalue(),
+ b"008b0000000000000000000000000000000000000000 "
+ b"310ca9477129b8586fa2afc779c1f57cf64bba6c "
+ b"refs/heads/blah12\x00delete-refs ofs-delta report-status0000"
+ + f.getvalue(),
+ )
+
    def test_send_pack_new_ref(self):
        """Create a new ref whose target object the server lacks.

        The client must send both the ref-update command and a pack
        containing the new commit and its (empty) tree.
        """
        self.rin.write(
            b"0064310ca9477129b8586fa2afc779c1f57cf64bba6c "
            b"refs/heads/master\x00 report-status delete-refs ofs-delta\n"
            b"0000000eunpack ok\n"
            b"0019ok refs/heads/blah12\n"
            b"0000"
        )
        self.rin.seek(0)

        # Minimal commit (empty tree, no parents) to push.
        tree = Tree()
        commit = Commit()
        commit.tree = tree
        commit.parents = []
        commit.author = commit.committer = b"test user"
        commit.commit_time = commit.author_time = 1174773719
        commit.commit_timezone = commit.author_timezone = 0
        commit.encoding = b"UTF-8"
        commit.message = b"test message"

        def update_refs(refs):
            return {
                b"refs/heads/blah12": commit.id,
                b"refs/heads/master": b"310ca9477129b8586fa2afc779c1f57cf64bba6c",
            }

        def generate_pack_data(have, want, ofs_delta=False, progress=None):
            return pack_objects_to_data(
                [
                    (commit, None),
                    (tree, b""),
                ]
            )

        # Render the same pack locally so the raw bytes can be compared below.
        f = BytesIO()
        count, records = generate_pack_data(None, None)
        write_pack_data(f.write, records, num_records=count)
        self.client.send_pack(b"/", update_refs, generate_pack_data)
        self.assertEqual(
            self.rout.getvalue(),
            b"008b0000000000000000000000000000000000000000 "
            + commit.id
            + b" refs/heads/blah12\x00delete-refs ofs-delta report-status0000"
            + f.getvalue(),
        )
+
    def test_send_pack_no_deleteref_delete_only(self):
        """Deleting a ref must fail when the server lacks delete-refs."""
        # Advertisement WITHOUT the delete-refs capability, followed by two
        # flush-pkts (empty strings below are encoded as b"0000").
        pkts = [
            b"310ca9477129b8586fa2afc779c1f57cf64bba6c refs/heads/master"
            b"\x00 report-status ofs-delta\n",
            b"",
            b"",
        ]
        for pkt in pkts:
            if pkt == b"":
                self.rin.write(b"0000")
            else:
                # Manual pkt-line framing: 4-hex-digit length includes itself.
                self.rin.write(("%04x" % (len(pkt) + 4)).encode("ascii") + pkt)
        self.rin.seek(0)

        def update_refs(refs):
            return {b"refs/heads/master": b"0" * 40}

        def generate_pack_data(have, want, ofs_delta=False, progress=None):
            return 0, []

        result = self.client.send_pack(b"/", update_refs, generate_pack_data)
        # The deletion is refused client-side; the ref keeps its old value
        # and nothing but a flush is ever written to the server.
        self.assertEqual(
            result.ref_status,
            {b"refs/heads/master": "remote does not support deleting refs"},
        )
        self.assertEqual(
            result.refs,
            {b"refs/heads/master": b"310ca9477129b8586fa2afc779c1f57cf64bba6c"},
        )
        self.assertEqual(self.rout.getvalue(), b"0000")
+
+
class TestGetTransportAndPath(TestCase):
    """Tests mapping URL / location strings to (client, path) pairs.

    ``get_transport_and_path`` accepts both real URLs and scp-style
    ``[user@]host:path`` locations; unknown schemes fall back to SSH.
    """

    def test_tcp(self):
        c, path = get_transport_and_path("git://foo.com/bar/baz")
        self.assertIsInstance(c, TCPGitClient)
        self.assertEqual("foo.com", c._host)
        # No explicit port: default git port is used.
        self.assertEqual(TCP_GIT_PORT, c._port)
        self.assertEqual("/bar/baz", path)

    def test_tcp_port(self):
        c, path = get_transport_and_path("git://foo.com:1234/bar/baz")
        self.assertIsInstance(c, TCPGitClient)
        self.assertEqual("foo.com", c._host)
        self.assertEqual(1234, c._port)
        self.assertEqual("/bar/baz", path)

    def test_git_ssh_explicit(self):
        c, path = get_transport_and_path("git+ssh://foo.com/bar/baz")
        self.assertIsInstance(c, SSHGitClient)
        self.assertEqual("foo.com", c.host)
        self.assertEqual(None, c.port)
        self.assertEqual(None, c.username)
        self.assertEqual("/bar/baz", path)

    def test_ssh_explicit(self):
        c, path = get_transport_and_path("ssh://foo.com/bar/baz")
        self.assertIsInstance(c, SSHGitClient)
        self.assertEqual("foo.com", c.host)
        self.assertEqual(None, c.port)
        self.assertEqual(None, c.username)
        self.assertEqual("/bar/baz", path)

    def test_ssh_port_explicit(self):
        c, path = get_transport_and_path("git+ssh://foo.com:1234/bar/baz")
        self.assertIsInstance(c, SSHGitClient)
        self.assertEqual("foo.com", c.host)
        self.assertEqual(1234, c.port)
        self.assertEqual("/bar/baz", path)

    def test_username_and_port_explicit_unknown_scheme(self):
        # Unknown schemes are treated as scp-style locations, so the
        # "unknown" scheme token ends up parsed as the host.
        c, path = get_transport_and_path("unknown://git@server:7999/dply/stuff.git")
        self.assertIsInstance(c, SSHGitClient)
        self.assertEqual("unknown", c.host)
        self.assertEqual("//git@server:7999/dply/stuff.git", path)

    def test_username_and_port_explicit(self):
        c, path = get_transport_and_path("ssh://git@server:7999/dply/stuff.git")
        self.assertIsInstance(c, SSHGitClient)
        self.assertEqual("git", c.username)
        self.assertEqual("server", c.host)
        self.assertEqual(7999, c.port)
        self.assertEqual("/dply/stuff.git", path)

    def test_ssh_abspath_doubleslash(self):
        # A double slash after the host must be preserved in the path.
        c, path = get_transport_and_path("git+ssh://foo.com//bar/baz")
        self.assertIsInstance(c, SSHGitClient)
        self.assertEqual("foo.com", c.host)
        self.assertEqual(None, c.port)
        self.assertEqual(None, c.username)
        self.assertEqual("//bar/baz", path)

    def test_ssh_port(self):
        c, path = get_transport_and_path("git+ssh://foo.com:1234/bar/baz")
        self.assertIsInstance(c, SSHGitClient)
        self.assertEqual("foo.com", c.host)
        self.assertEqual(1234, c.port)
        self.assertEqual("/bar/baz", path)

    # scp-style "host:path" locations (no URL scheme) imply SSH.

    def test_ssh_implicit(self):
        c, path = get_transport_and_path("foo:/bar/baz")
        self.assertIsInstance(c, SSHGitClient)
        self.assertEqual("foo", c.host)
        self.assertEqual(None, c.port)
        self.assertEqual(None, c.username)
        self.assertEqual("/bar/baz", path)

    def test_ssh_host(self):
        c, path = get_transport_and_path("foo.com:/bar/baz")
        self.assertIsInstance(c, SSHGitClient)
        self.assertEqual("foo.com", c.host)
        self.assertEqual(None, c.port)
        self.assertEqual(None, c.username)
        self.assertEqual("/bar/baz", path)

    def test_ssh_user_host(self):
        c, path = get_transport_and_path("user@foo.com:/bar/baz")
        self.assertIsInstance(c, SSHGitClient)
        self.assertEqual("foo.com", c.host)
        self.assertEqual(None, c.port)
        self.assertEqual("user", c.username)
        self.assertEqual("/bar/baz", path)

    def test_ssh_relpath(self):
        c, path = get_transport_and_path("foo:bar/baz")
        self.assertIsInstance(c, SSHGitClient)
        self.assertEqual("foo", c.host)
        self.assertEqual(None, c.port)
        self.assertEqual(None, c.username)
        self.assertEqual("bar/baz", path)

    def test_ssh_host_relpath(self):
        c, path = get_transport_and_path("foo.com:bar/baz")
        self.assertIsInstance(c, SSHGitClient)
        self.assertEqual("foo.com", c.host)
        self.assertEqual(None, c.port)
        self.assertEqual(None, c.username)
        self.assertEqual("bar/baz", path)

    def test_ssh_user_host_relpath(self):
        c, path = get_transport_and_path("user@foo.com:bar/baz")
        self.assertIsInstance(c, SSHGitClient)
        self.assertEqual("foo.com", c.host)
        self.assertEqual(None, c.port)
        self.assertEqual("user", c.username)
        self.assertEqual("bar/baz", path)

    def test_local(self):
        # No scheme and no colon: treated as a local filesystem path.
        c, path = get_transport_and_path("foo.bar/baz")
        self.assertIsInstance(c, LocalGitClient)
        self.assertEqual("foo.bar/baz", path)

    @skipIf(sys.platform != "win32", "Behaviour only happens on windows.")
    def test_local_abs_windows_path(self):
        # "C:\..." contains a colon but must not be mistaken for scp syntax.
        c, path = get_transport_and_path("C:\\foo.bar\\baz")
        self.assertIsInstance(c, LocalGitClient)
        self.assertEqual("C:\\foo.bar\\baz", path)

    def test_error(self):
        # Need to use a known urlparse.uses_netloc URL scheme to get the
        # expected parsing of the URL on Python versions less than 2.6.5
        c, path = get_transport_and_path("prospero://bar/baz")
        self.assertIsInstance(c, SSHGitClient)

    def test_http(self):
        url = "https://github.com/jelmer/dulwich"
        c, path = get_transport_and_path(url)
        self.assertIsInstance(c, HttpGitClient)
        self.assertEqual("/jelmer/dulwich", path)

    def test_http_auth(self):
        # Credentials embedded in the URL populate the client.
        url = "https://user:passwd@github.com/jelmer/dulwich"

        c, path = get_transport_and_path(url)

        self.assertIsInstance(c, HttpGitClient)
        self.assertEqual("/jelmer/dulwich", path)
        self.assertEqual("user", c._username)
        self.assertEqual("passwd", c._password)

    def test_http_auth_with_username(self):
        # Credentials may also be passed as keyword arguments.
        url = "https://github.com/jelmer/dulwich"

        c, path = get_transport_and_path(url, username="user2", password="blah")

        self.assertIsInstance(c, HttpGitClient)
        self.assertEqual("/jelmer/dulwich", path)
        self.assertEqual("user2", c._username)
        self.assertEqual("blah", c._password)

    def test_http_auth_with_username_and_in_url(self):
        # URL-embedded credentials take precedence over keyword arguments.
        url = "https://user:passwd@github.com/jelmer/dulwich"

        c, path = get_transport_and_path(url, username="user2", password="blah")

        self.assertIsInstance(c, HttpGitClient)
        self.assertEqual("/jelmer/dulwich", path)
        self.assertEqual("user", c._username)
        self.assertEqual("passwd", c._password)

    def test_http_no_auth(self):
        url = "https://github.com/jelmer/dulwich"

        c, path = get_transport_and_path(url)

        self.assertIsInstance(c, HttpGitClient)
        self.assertEqual("/jelmer/dulwich", path)
        self.assertIs(None, c._username)
        self.assertIs(None, c._password)
+
+
class TestGetTransportAndPathFromUrl(TestCase):
    """Tests for the stricter ``get_transport_and_path_from_url``.

    Unlike ``get_transport_and_path``, this variant only accepts real
    URLs: scp-style locations, bare local paths and unknown schemes all
    raise ``ValueError``.
    """

    def test_tcp(self):
        c, path = get_transport_and_path_from_url("git://foo.com/bar/baz")
        self.assertIsInstance(c, TCPGitClient)
        self.assertEqual("foo.com", c._host)
        self.assertEqual(TCP_GIT_PORT, c._port)
        self.assertEqual("/bar/baz", path)

    def test_tcp_port(self):
        c, path = get_transport_and_path_from_url("git://foo.com:1234/bar/baz")
        self.assertIsInstance(c, TCPGitClient)
        self.assertEqual("foo.com", c._host)
        self.assertEqual(1234, c._port)
        self.assertEqual("/bar/baz", path)

    def test_ssh_explicit(self):
        c, path = get_transport_and_path_from_url("git+ssh://foo.com/bar/baz")
        self.assertIsInstance(c, SSHGitClient)
        self.assertEqual("foo.com", c.host)
        self.assertEqual(None, c.port)
        self.assertEqual(None, c.username)
        self.assertEqual("/bar/baz", path)

    def test_ssh_port_explicit(self):
        c, path = get_transport_and_path_from_url("git+ssh://foo.com:1234/bar/baz")
        self.assertIsInstance(c, SSHGitClient)
        self.assertEqual("foo.com", c.host)
        self.assertEqual(1234, c.port)
        self.assertEqual("/bar/baz", path)

    def test_ssh_homepath(self):
        # "/~/" home-relative paths must survive URL parsing intact.
        c, path = get_transport_and_path_from_url("git+ssh://foo.com/~/bar/baz")
        self.assertIsInstance(c, SSHGitClient)
        self.assertEqual("foo.com", c.host)
        self.assertEqual(None, c.port)
        self.assertEqual(None, c.username)
        self.assertEqual("/~/bar/baz", path)

    def test_ssh_port_homepath(self):
        c, path = get_transport_and_path_from_url("git+ssh://foo.com:1234/~/bar/baz")
        self.assertIsInstance(c, SSHGitClient)
        self.assertEqual("foo.com", c.host)
        self.assertEqual(1234, c.port)
        self.assertEqual("/~/bar/baz", path)

    def test_ssh_host_relpath(self):
        # scp-style syntax is rejected by the URL-only entry point.
        self.assertRaises(
            ValueError, get_transport_and_path_from_url, "foo.com:bar/baz"
        )

    def test_ssh_user_host_relpath(self):
        self.assertRaises(
            ValueError, get_transport_and_path_from_url, "user@foo.com:bar/baz"
        )

    def test_local_path(self):
        self.assertRaises(ValueError, get_transport_and_path_from_url, "foo.bar/baz")

    def test_error(self):
        # Need to use a known urlparse.uses_netloc URL scheme to get the
        # expected parsing of the URL on Python versions less than 2.6.5
        self.assertRaises(
            ValueError, get_transport_and_path_from_url, "prospero://bar/baz"
        )

    def test_http(self):
        url = "https://github.com/jelmer/dulwich"
        c, path = get_transport_and_path_from_url(url)
        self.assertIsInstance(c, HttpGitClient)
        self.assertEqual("https://github.com", c.get_url(b"/"))
        self.assertEqual("/jelmer/dulwich", path)

    def test_http_port(self):
        url = "https://github.com:9090/jelmer/dulwich"
        c, path = get_transport_and_path_from_url(url)
        self.assertEqual("https://github.com:9090", c.get_url(b"/"))
        self.assertIsInstance(c, HttpGitClient)
        self.assertEqual("/jelmer/dulwich", path)

    @patch("os.name", "posix")
    @patch("sys.platform", "linux")
    def test_file(self):
        c, path = get_transport_and_path_from_url("file:///home/jelmer/foo")
        self.assertIsInstance(c, LocalGitClient)
        self.assertEqual("/home/jelmer/foo", path)

    @patch("os.name", "nt")
    @patch("sys.platform", "win32")
    def test_file_win(self):
        # `_win32_url_to_path` uses urllib.request.url2pathname, which is set to
        # `ntutl2path.url2pathname` when `os.name==nt`
        from nturl2path import url2pathname

        with patch("dulwich.client.url2pathname", url2pathname):
            expected = "C:\\foo.bar\\baz"
            for file_url in [
                "file:C:/foo.bar/baz",
                "file:/C:/foo.bar/baz",
                "file://C:/foo.bar/baz",
                "file://C://foo.bar//baz",
                "file:///C:/foo.bar/baz",
            ]:
                c, path = get_transport_and_path(file_url)
                self.assertIsInstance(c, LocalGitClient)
                self.assertEqual(path, expected)

            # BUG FIX: the original list was missing commas, so implicit
            # string concatenation collapsed all three URLs into a single
            # garbage string and only one loop iteration actually ran.
            for remote_url in [
                "file://host.example.com/C:/foo.bar/baz",
                "file://host.example.com/C:/foo.bar/baz",
                "file:////host.example/foo.bar/baz",
            ]:
                with self.assertRaises(NotImplementedError):
                    c, path = get_transport_and_path(remote_url)
+
+
class TestSSHVendor:
    """Stub SSH vendor that records ``run_command`` arguments.

    Installed in place of ``client.get_ssh_vendor`` by SSHGitClientTests
    so tests can assert what the client asked the vendor to run.
    """

    def __init__(self) -> None:
        # Captured arguments from the last run_command() call.
        self.host = None
        self.command = ""
        self.username = None
        self.port = None
        self.password = None
        self.key_filename = None

    def run_command(
        self,
        host,
        command,
        username=None,
        port=None,
        password=None,
        key_filename=None,
        ssh_command=None,
    ):
        # Record everything the client passed, then hand back a dummy
        # connection object.
        self.host = host
        self.command = command
        self.username = username
        self.port = port
        self.password = password
        self.key_filename = key_filename
        self.ssh_command = ssh_command

        class Subprocess:
            pass

        # NOTE(review): these lambdas take no arguments, so calling them as
        # bound methods (e.g. proc.read()) would raise TypeError. The tests
        # in view never invoke them -- confirm before relying on this stub
        # for anything beyond argument capture.
        Subprocess.read = lambda: None
        Subprocess.write = lambda: None
        Subprocess.close = lambda: None
        Subprocess.can_read = lambda: None
        return Subprocess()
+
+
class SSHGitClientTests(TestCase):
    """Tests for SSHGitClient URL building, command paths and connect."""

    def setUp(self):
        super().setUp()

        # Swap the module-level vendor factory for the recording stub;
        # tearDown restores the original.
        self.server = TestSSHVendor()
        self.real_vendor = client.get_ssh_vendor
        client.get_ssh_vendor = lambda: self.server

        self.client = SSHGitClient("git.samba.org")

    def tearDown(self):
        super().tearDown()
        client.get_ssh_vendor = self.real_vendor

    def test_get_url(self):
        path = "/tmp/repo.git"
        c = SSHGitClient("git.samba.org")

        url = c.get_url(path)
        self.assertEqual("ssh://git.samba.org/tmp/repo.git", url)

    def test_get_url_with_username_and_port(self):
        path = "/tmp/repo.git"
        c = SSHGitClient("git.samba.org", port=2222, username="user")

        url = c.get_url(path)
        self.assertEqual("ssh://user@git.samba.org:2222/tmp/repo.git", url)

    def test_default_command(self):
        # Without overrides, commands get the standard "git-" prefix.
        self.assertEqual(b"git-upload-pack", self.client._get_cmd_path(b"upload-pack"))

    def test_alternative_command_path(self):
        # alternative_paths lets callers point at a non-PATH binary.
        self.client.alternative_paths[b"upload-pack"] = b"/usr/lib/git/git-upload-pack"
        self.assertEqual(
            b"/usr/lib/git/git-upload-pack",
            self.client._get_cmd_path(b"upload-pack"),
        )

    def test_alternative_command_path_spaces(self):
        # Extra arguments embedded in the alternative path are preserved.
        self.client.alternative_paths[b"upload-pack"] = (
            b"/usr/lib/git/git-upload-pack -ibla"
        )
        self.assertEqual(
            b"/usr/lib/git/git-upload-pack -ibla",
            self.client._get_cmd_path(b"upload-pack"),
        )

    def test_connect(self):
        server = self.server
        client = self.client

        client.username = b"username"
        client.port = 1337

        # Credentials are forwarded to the vendor and the path is quoted.
        client._connect(b"command", b"/path/to/repo")
        self.assertEqual(b"username", server.username)
        self.assertEqual(1337, server.port)
        self.assertEqual("git-command '/path/to/repo'", server.command)

        # A leading "/~/" is stripped down to a home-relative path.
        client._connect(b"relative-command", b"/~/path/to/repo")
        self.assertEqual("git-relative-command '~/path/to/repo'", server.command)

    def test_ssh_command_precedence(self):
        # GIT_SSH is used when set...
        self.overrideEnv("GIT_SSH", "/path/to/ssh")
        test_client = SSHGitClient("git.samba.org")
        self.assertEqual(test_client.ssh_command, "/path/to/ssh")

        # ...GIT_SSH_COMMAND overrides GIT_SSH...
        self.overrideEnv("GIT_SSH_COMMAND", "/path/to/ssh -o Option=Value")
        test_client = SSHGitClient("git.samba.org")
        self.assertEqual(test_client.ssh_command, "/path/to/ssh -o Option=Value")

        # ...and an explicit constructor argument beats both.
        test_client = SSHGitClient("git.samba.org", ssh_command="ssh -o Option1=Value1")
        self.assertEqual(test_client.ssh_command, "ssh -o Option1=Value1")
+
+
class ReportStatusParserTests(TestCase):
    """Tests for ReportStatusParser handling of report-status packets."""

    def _feed(self, *packets):
        """Build a parser, feed it *packets* plus the terminating flush."""
        parser = ReportStatusParser()
        for packet in packets:
            parser.handle_packet(packet)
        parser.handle_packet(None)
        return parser

    def test_invalid_pack(self):
        parser = self._feed(b"unpack error - foo bar", b"ok refs/foo/bar")
        # An unpack failure surfaces as SendPackError when iterating check().
        self.assertRaises(SendPackError, list, parser.check())

    def test_update_refs_error(self):
        parser = self._feed(b"unpack ok", b"ng refs/foo/bar need to pull")
        # "ng" lines yield (ref, reason) pairs.
        self.assertEqual([(b"refs/foo/bar", "need to pull")], list(parser.check()))

    def test_ok(self):
        parser = self._feed(b"unpack ok", b"ok refs/foo/bar")
        # "ok" lines yield (ref, None).
        self.assertEqual([(b"refs/foo/bar", None)], list(parser.check()))
+
+
class LocalGitClientTests(TestCase):
    """Tests for LocalGitClient against on-disk fixture repositories."""

    def test_get_url(self):
        path = "/tmp/repo.git"
        c = LocalGitClient()

        url = c.get_url(path)
        self.assertEqual("file:///tmp/repo.git", url)

    def test_fetch_into_empty(self):
        # Fetching into a fresh bare repo should mirror all source refs.
        c = LocalGitClient()
        target = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, target)
        t = Repo.init_bare(target)
        self.addCleanup(t.close)
        s = open_repo("a.git")
        self.addCleanup(tear_down_repo, s)
        self.assertEqual(s.get_refs(), c.fetch(s.path, t).refs)

    def test_clone(self):
        c = LocalGitClient()
        s = open_repo("a.git")
        self.addCleanup(tear_down_repo, s)
        target = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, target)
        result_repo = c.clone(s.path, target, mkdir=False)
        self.addCleanup(result_repo.close)
        # A clone adds remote-tracking refs on top of the source refs.
        expected = dict(s.get_refs())
        expected[b"refs/remotes/origin/HEAD"] = expected[b"HEAD"]
        expected[b"refs/remotes/origin/master"] = expected[b"refs/heads/master"]
        self.assertEqual(expected, result_repo.get_refs())

    def test_fetch_empty(self):
        # Requesting no wants still reports refs/symrefs and writes an
        # empty (zero-object) pack.
        c = LocalGitClient()
        s = open_repo("a.git")
        self.addCleanup(tear_down_repo, s)
        out = BytesIO()
        walker = {}
        ret = c.fetch_pack(
            s.path, lambda heads, **kwargs: [], graph_walker=walker, pack_data=out.write
        )
        self.assertEqual(
            {
                b"HEAD": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
                b"refs/heads/master": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
                b"refs/tags/mytag": b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a",
                b"refs/tags/mytag-packed": b"b0931cadc54336e78a1d980420e3268903b57a50",
            },
            ret.refs,
        )
        self.assertEqual({b"HEAD": b"refs/heads/master"}, ret.symrefs)
        # Pack header + trailer of an empty pack, byte for byte.
        self.assertEqual(
            b"PACK\x00\x00\x00\x02\x00\x00\x00\x00\x02\x9d\x08"
            b"\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e",
            out.getvalue(),
        )

    def test_fetch_pack_none(self):
        c = LocalGitClient()
        s = open_repo("a.git")
        self.addCleanup(tear_down_repo, s)
        out = BytesIO()
        walker = MemoryRepo().get_graph_walker()
        ret = c.fetch_pack(
            s.path,
            lambda heads, **kwargs: [b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"],
            graph_walker=walker,
            pack_data=out.write,
        )
        self.assertEqual({b"HEAD": b"refs/heads/master"}, ret.symrefs)
        self.assertEqual(
            {
                b"HEAD": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
                b"refs/heads/master": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
                b"refs/tags/mytag": b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a",
                b"refs/tags/mytag-packed": b"b0931cadc54336e78a1d980420e3268903b57a50",
            },
            ret.refs,
        )
        # Hardcoding is not ideal, but we'll fix that some other day..
        self.assertTrue(
            out.getvalue().startswith(b"PACK\x00\x00\x00\x02\x00\x00\x00\x07")
        )

    def test_send_pack_without_changes(self):
        local = open_repo("a.git")
        self.addCleanup(tear_down_repo, local)

        # Source and target are identical copies, so this is a no-op push.
        target = open_repo("a.git")
        self.addCleanup(tear_down_repo, target)

        self.send_and_verify(b"master", local, target)

    def test_send_pack_with_changes(self):
        local = open_repo("a.git")
        self.addCleanup(tear_down_repo, local)

        # Pushing into an empty bare repo transfers the branch content.
        target_path = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, target_path)
        with Repo.init_bare(target_path) as target:
            self.send_and_verify(b"master", local, target)

    def test_get_refs(self):
        local = open_repo("refs.git")
        self.addCleanup(tear_down_repo, local)

        client = LocalGitClient()
        refs = client.get_refs(local.path)
        self.assertDictEqual(local.refs.as_dict(), refs)

    def send_and_verify(self, branch, local, target):
        """Send branch from local to remote repository and verify it worked."""
        client = LocalGitClient()
        ref_name = b"refs/heads/" + branch
        result = client.send_pack(
            target.path,
            lambda _: {ref_name: local.refs[ref_name]},
            local.generate_pack_data,
        )

        # The push must report success and leave the ref in sync.
        self.assertEqual(local.refs[ref_name], result.refs[ref_name])
        self.assertIs(None, result.agent)
        self.assertEqual({}, result.ref_status)

        # The pushed object must be readable and identical on both sides.
        obj_local = local.get_object(result.refs[ref_name])
        obj_target = target.get_object(result.refs[ref_name])
        self.assertEqual(obj_local, obj_target)
+
+
class HttpGitClientTests(TestCase):
    """Tests for HttpGitClient: URL building, auth headers, redirects.

    Network access is avoided by substituting mock urllib3 pool managers.
    """

    def test_get_url(self):
        base_url = "https://github.com/jelmer/dulwich"
        path = "/jelmer/dulwich"
        c = HttpGitClient(base_url)

        url = c.get_url(path)
        self.assertEqual("https://github.com/jelmer/dulwich", url)

    def test_get_url_bytes_path(self):
        # Bytes paths are accepted and produce the same URL.
        base_url = "https://github.com/jelmer/dulwich"
        path_bytes = b"/jelmer/dulwich"
        c = HttpGitClient(base_url)

        url = c.get_url(path_bytes)
        self.assertEqual("https://github.com/jelmer/dulwich", url)

    def test_get_url_with_username_and_passwd(self):
        # Credentials must NOT leak into the generated URL.
        base_url = "https://github.com/jelmer/dulwich"
        path = "/jelmer/dulwich"
        c = HttpGitClient(base_url, username="USERNAME", password="PASSWD")

        url = c.get_url(path)
        self.assertEqual("https://github.com/jelmer/dulwich", url)

    def test_init_username_passwd_set(self):
        url = "https://github.com/jelmer/dulwich"

        c = HttpGitClient(url, config=None, username="user", password="passwd")
        self.assertEqual("user", c._username)
        self.assertEqual("passwd", c._password)

        # Credentials become a pre-computed Basic auth header.
        basic_auth = c.pool_manager.headers["authorization"]
        auth_string = "{}:{}".format("user", "passwd")
        b64_credentials = base64.b64encode(auth_string.encode("latin1"))
        expected_basic_auth = "Basic %s" % b64_credentials.decode("latin1")
        self.assertEqual(basic_auth, expected_basic_auth)

    def test_init_username_set_no_password(self):
        url = "https://github.com/jelmer/dulwich"

        c = HttpGitClient(url, config=None, username="user")
        self.assertEqual("user", c._username)
        self.assertIsNone(c._password)

        # A username without a password still yields a header ("user:").
        basic_auth = c.pool_manager.headers["authorization"]
        auth_string = b"user:"
        b64_credentials = base64.b64encode(auth_string)
        expected_basic_auth = f"Basic {b64_credentials.decode('ascii')}"
        self.assertEqual(basic_auth, expected_basic_auth)

    def test_init_no_username_passwd(self):
        url = "https://github.com/jelmer/dulwich"

        c = HttpGitClient(url, config=None)
        self.assertIs(None, c._username)
        self.assertIs(None, c._password)
        # No credentials: no authorization header at all.
        self.assertNotIn("authorization", c.pool_manager.headers)

    def test_from_parsedurl_username_only(self):
        username = "user"
        url = f"https://{username}@github.com/jelmer/dulwich"

        c = HttpGitClient.from_parsedurl(urlparse(url))
        self.assertEqual(c._username, username)
        self.assertEqual(c._password, None)

        basic_auth = c.pool_manager.headers["authorization"]
        auth_string = username.encode("ascii") + b":"
        b64_credentials = base64.b64encode(auth_string)
        expected_basic_auth = f"Basic {b64_credentials.decode('ascii')}"
        self.assertEqual(basic_auth, expected_basic_auth)

    def test_from_parsedurl_on_url_with_quoted_credentials(self):
        # Percent-encoded credentials in the URL must be decoded before
        # being used for Basic auth.
        original_username = "john|the|first"
        quoted_username = urlquote(original_username)

        original_password = "Ya#1$2%3"
        quoted_password = urlquote(original_password)

        url = f"https://{quoted_username}:{quoted_password}@github.com/jelmer/dulwich"

        c = HttpGitClient.from_parsedurl(urlparse(url))
        self.assertEqual(original_username, c._username)
        self.assertEqual(original_password, c._password)

        basic_auth = c.pool_manager.headers["authorization"]
        auth_string = f"{original_username}:{original_password}"
        b64_credentials = base64.b64encode(auth_string.encode("latin1"))
        expected_basic_auth = "Basic %s" % b64_credentials.decode("latin1")
        self.assertEqual(basic_auth, expected_basic_auth)

    def test_url_redirect_location(self):
        from urllib3.response import HTTPResponse

        # Three scenarios: redirect to a ".git" URL, no redirect, and a
        # redirect given as an absolute-path reference.
        test_data = {
            "https://gitlab.com/inkscape/inkscape/": {
                "location": "https://gitlab.com/inkscape/inkscape.git/",
                "redirect_url": "https://gitlab.com/inkscape/inkscape.git/",
                "refs_data": (
                    b"001e# service=git-upload-pack\n00000032"
                    b"fb2bebf4919a011f0fd7cec085443d0031228e76 "
                    b"HEAD\n0000"
                ),
            },
            "https://github.com/jelmer/dulwich/": {
                "location": "https://github.com/jelmer/dulwich/",
                "redirect_url": "https://github.com/jelmer/dulwich/",
                "refs_data": (
                    b"001e# service=git-upload-pack\n00000032"
                    b"3ff25e09724aa4d86ea5bca7d5dd0399a3c8bfcf "
                    b"HEAD\n0000"
                ),
            },
            # check for absolute-path URI reference as location
            "https://codeberg.org/ashwinvis/radicale-sh.git/": {
                "location": "/ashwinvis/radicale-auth-sh/",
                "redirect_url": "https://codeberg.org/ashwinvis/radicale-auth-sh/",
                "refs_data": (
                    b"001e# service=git-upload-pack\n00000032"
                    b"470f8603768b608fc988675de2fae8f963c21158 "
                    b"HEAD\n0000"
                ),
            },
        }

        tail = "info/refs?service=git-upload-pack"

        # we need to mock urllib3.PoolManager as this test will fail
        # otherwise without an active internet connection
        class PoolManagerMock:
            def __init__(self) -> None:
                self.headers: Dict[str, str] = {}

            def request(
                self,
                method,
                url,
                fields=None,
                headers=None,
                redirect=True,
                preload_content=True,
            ):
                base_url = url[: -len(tail)]
                redirect_base_url = test_data[base_url]["location"]
                redirect_url = redirect_base_url + tail
                headers = {
                    "Content-Type": "application/x-git-upload-pack-advertisement"
                }
                body = test_data[base_url]["refs_data"]
                # urllib3 handles automatic redirection by default
                status = 200
                request_url = redirect_url
                # simulate urllib3 behavior when redirect parameter is False
                if redirect is False:
                    request_url = url
                    if redirect_base_url != base_url:
                        body = b""
                        headers["location"] = test_data[base_url]["location"]
                        status = 301

                return HTTPResponse(
                    body=BytesIO(body),
                    headers=headers,
                    request_method=method,
                    request_url=request_url,
                    preload_content=preload_content,
                    status=status,
                )

        pool_manager = PoolManagerMock()

        for base_url in test_data.keys():
            # instantiate HttpGitClient with mocked pool manager
            c = HttpGitClient(base_url, pool_manager=pool_manager, config=None)
            # call method that detects url redirection
            _, _, processed_url = c._discover_references(b"git-upload-pack", base_url)

            # send the same request as the method above without redirection
            resp = c.pool_manager.request("GET", base_url + tail, redirect=False)

            # check expected behavior of urllib3
            redirect_location = resp.get_redirect_location()

            if resp.status == 200:
                self.assertFalse(redirect_location)

            if redirect_location:
                # check that url redirection has been correctly detected
                self.assertEqual(processed_url, test_data[base_url]["redirect_url"])
            else:
                # check also the no redirection case
                self.assertEqual(processed_url, base_url)

    def test_smart_request_content_type_with_directive_check(self):
        from urllib3.response import HTTPResponse

        # we need to mock urllib3.PoolManager as this test will fail
        # otherwise without an active internet connection
        class PoolManagerMock:
            def __init__(self) -> None:
                self.headers: Dict[str, str] = {}

            def request(
                self,
                method,
                url,
                fields=None,
                headers=None,
                redirect=True,
                preload_content=True,
            ):
                # Content-Type carries an extra charset directive, which
                # _smart_request must accept.
                return HTTPResponse(
                    headers={
                        "Content-Type": "application/x-git-upload-pack-result; charset=utf-8"
                    },
                    request_method=method,
                    request_url=url,
                    preload_content=preload_content,
                    status=200,
                )

        clone_url = "https://hacktivis.me/git/blog.git/"
        client = HttpGitClient(clone_url, pool_manager=PoolManagerMock(), config=None)
        self.assertTrue(client._smart_request("git-upload-pack", clone_url, data=None))
+
+
class TCPGitClientTests(TestCase):
    """Tests for TCPGitClient URL construction."""

    def test_get_url(self):
        # Default port is omitted from the rendered URL.
        client = TCPGitClient("github.com")
        self.assertEqual(
            "git://github.com/jelmer/dulwich", client.get_url("/jelmer/dulwich")
        )

    def test_get_url_with_port(self):
        # A non-default port appears after the host.
        client = TCPGitClient("github.com", port=9090)
        self.assertEqual(
            "git://github.com:9090/jelmer/dulwich", client.get_url("/jelmer/dulwich")
        )
+
+
+class DefaultUrllib3ManagerTest(TestCase):
    def test_no_config(self):
        # With no config at all, TLS verification must default to on.
        manager = default_urllib3_manager(config=None)
        self.assertEqual(manager.connection_pool_kw["cert_reqs"], "CERT_REQUIRED")

    def test_config_no_proxy(self):
        import urllib3

        # An empty config yields a plain PoolManager, not a ProxyManager.
        manager = default_urllib3_manager(config=ConfigDict())
        self.assertNotIsInstance(manager, urllib3.ProxyManager)
        self.assertIsInstance(manager, urllib3.PoolManager)

    def test_config_no_proxy_custom_cls(self):
        import urllib3

        class CustomPoolManager(urllib3.PoolManager):
            pass

        # A caller-supplied pool manager class must be honoured.
        manager = default_urllib3_manager(
            config=ConfigDict(), pool_manager_cls=CustomPoolManager
        )
        self.assertIsInstance(manager, CustomPoolManager)

    def test_config_ssl(self):
        # http.sslVerify=true keeps certificate checking enabled.
        config = ConfigDict()
        config.set(b"http", b"sslVerify", b"true")
        manager = default_urllib3_manager(config=config)
        self.assertEqual(manager.connection_pool_kw["cert_reqs"], "CERT_REQUIRED")

    def test_config_no_ssl(self):
        # http.sslVerify=false disables certificate checking.
        config = ConfigDict()
        config.set(b"http", b"sslVerify", b"false")
        manager = default_urllib3_manager(config=config)
        self.assertEqual(manager.connection_pool_kw["cert_reqs"], "CERT_NONE")
+
    def test_config_proxy(self):
        import urllib3

        # http.proxy in the git config turns the manager into a ProxyManager.
        config = ConfigDict()
        config.set(b"http", b"proxy", b"http://localhost:3128/")
        manager = default_urllib3_manager(config=config)

        self.assertIsInstance(manager, urllib3.ProxyManager)
        self.assertTrue(hasattr(manager, "proxy"))
        self.assertEqual(manager.proxy.scheme, "http")
        self.assertEqual(manager.proxy.host, "localhost")
        self.assertEqual(manager.proxy.port, 3128)

    def test_environment_proxy(self):
        import urllib3

        # The http_proxy environment variable is honoured as well.
        config = ConfigDict()
        self.overrideEnv("http_proxy", "http://myproxy:8080")
        manager = default_urllib3_manager(config=config)
        self.assertIsInstance(manager, urllib3.ProxyManager)
        self.assertTrue(hasattr(manager, "proxy"))
        self.assertEqual(manager.proxy.scheme, "http")
        self.assertEqual(manager.proxy.host, "myproxy")
        self.assertEqual(manager.proxy.port, 8080)

    def test_environment_empty_proxy(self):
        import urllib3

        # An empty http_proxy value means "no proxy".
        config = ConfigDict()
        self.overrideEnv("http_proxy", "")
        manager = default_urllib3_manager(config=config)
        self.assertNotIsInstance(manager, urllib3.ProxyManager)
        self.assertIsInstance(manager, urllib3.PoolManager)
+
    def test_environment_no_proxy_1(self):
        import urllib3

        # Host ends with a no_proxy suffix (".abc.def.gh"): proxy bypassed.
        config = ConfigDict()
        self.overrideEnv("http_proxy", "http://myproxy:8080")
        self.overrideEnv("no_proxy", "xyz,abc.def.gh,abc.gh")
        base_url = "http://xyz.abc.def.gh:8080/path/port"
        manager = default_urllib3_manager(config=config, base_url=base_url)
        self.assertNotIsInstance(manager, urllib3.ProxyManager)
        self.assertIsInstance(manager, urllib3.PoolManager)

    def test_environment_no_proxy_2(self):
        import urllib3

        # Exact host match in no_proxy: proxy bypassed.
        config = ConfigDict()
        self.overrideEnv("http_proxy", "http://myproxy:8080")
        self.overrideEnv("no_proxy", "xyz,abc.def.gh,abc.gh,ample.com")
        base_url = "http://ample.com/path/port"
        manager = default_urllib3_manager(config=config, base_url=base_url)
        self.assertNotIsInstance(manager, urllib3.ProxyManager)
        self.assertIsInstance(manager, urllib3.PoolManager)

    def test_environment_no_proxy_3(self):
        import urllib3

        # Same as above, with an explicit port in the URL.
        config = ConfigDict()
        self.overrideEnv("http_proxy", "http://myproxy:8080")
        self.overrideEnv("no_proxy", "xyz,abc.def.gh,abc.gh,ample.com")
        base_url = "http://ample.com:80/path/port"
        manager = default_urllib3_manager(config=config, base_url=base_url)
        self.assertNotIsInstance(manager, urllib3.ProxyManager)
        self.assertIsInstance(manager, urllib3.PoolManager)

    def test_environment_no_proxy_4(self):
        import urllib3

        # Subdomain of a listed host ("www.ample.com"): still bypassed.
        config = ConfigDict()
        self.overrideEnv("http_proxy", "http://myproxy:8080")
        self.overrideEnv("no_proxy", "xyz,abc.def.gh,abc.gh,ample.com")
        base_url = "http://www.ample.com/path/port"
        manager = default_urllib3_manager(config=config, base_url=base_url)
        self.assertNotIsInstance(manager, urllib3.ProxyManager)
        self.assertIsInstance(manager, urllib3.PoolManager)
+
+ def test_environment_no_proxy_5(self):
+ import urllib3
+
+ config = ConfigDict()
+ self.overrideEnv("http_proxy", "http://myproxy:8080")
+ self.overrideEnv("no_proxy", "xyz,abc.def.gh,abc.gh,ample.com")
+ base_url = "http://www.example.com/path/port"
+ manager = default_urllib3_manager(config=config, base_url=base_url)
+ self.assertIsInstance(manager, urllib3.ProxyManager)
+ self.assertTrue(hasattr(manager, "proxy"))
+ self.assertEqual(manager.proxy.scheme, "http")
+ self.assertEqual(manager.proxy.host, "myproxy")
+ self.assertEqual(manager.proxy.port, 8080)
+
+ def test_environment_no_proxy_6(self):
+ import urllib3
+
+ config = ConfigDict()
+ self.overrideEnv("http_proxy", "http://myproxy:8080")
+ self.overrideEnv("no_proxy", "xyz,abc.def.gh,abc.gh,ample.com")
+ base_url = "http://ample.com.org/path/port"
+ manager = default_urllib3_manager(config=config, base_url=base_url)
+ self.assertIsInstance(manager, urllib3.ProxyManager)
+ self.assertTrue(hasattr(manager, "proxy"))
+ self.assertEqual(manager.proxy.scheme, "http")
+ self.assertEqual(manager.proxy.host, "myproxy")
+ self.assertEqual(manager.proxy.port, 8080)
+
+ def test_environment_no_proxy_ipv4_address_1(self):
+ import urllib3
+
+ config = ConfigDict()
+ self.overrideEnv("http_proxy", "http://myproxy:8080")
+ self.overrideEnv("no_proxy", "xyz,abc.def.gh,192.168.0.10,ample.com")
+ base_url = "http://192.168.0.10/path/port"
+ manager = default_urllib3_manager(config=config, base_url=base_url)
+ self.assertNotIsInstance(manager, urllib3.ProxyManager)
+ self.assertIsInstance(manager, urllib3.PoolManager)
+
+ def test_environment_no_proxy_ipv4_address_2(self):
+ import urllib3
+
+ config = ConfigDict()
+ self.overrideEnv("http_proxy", "http://myproxy:8080")
+ self.overrideEnv("no_proxy", "xyz,abc.def.gh,192.168.0.10,ample.com")
+ base_url = "http://192.168.0.10:8888/path/port"
+ manager = default_urllib3_manager(config=config, base_url=base_url)
+ self.assertNotIsInstance(manager, urllib3.ProxyManager)
+ self.assertIsInstance(manager, urllib3.PoolManager)
+
+ def test_environment_no_proxy_ipv4_address_3(self):
+ import urllib3
+
+ config = ConfigDict()
+ self.overrideEnv("http_proxy", "http://myproxy:8080")
+ self.overrideEnv(
+ "no_proxy", "xyz,abc.def.gh,ff80:1::/64,192.168.0.0/24,ample.com"
+ )
+ base_url = "http://192.168.0.10/path/port"
+ manager = default_urllib3_manager(config=config, base_url=base_url)
+ self.assertNotIsInstance(manager, urllib3.ProxyManager)
+ self.assertIsInstance(manager, urllib3.PoolManager)
+
+ def test_environment_no_proxy_ipv6_address_1(self):
+ import urllib3
+
+ config = ConfigDict()
+ self.overrideEnv("http_proxy", "http://myproxy:8080")
+ self.overrideEnv("no_proxy", "xyz,abc.def.gh,ff80:1::affe,ample.com")
+ base_url = "http://[ff80:1::affe]/path/port"
+ manager = default_urllib3_manager(config=config, base_url=base_url)
+ self.assertNotIsInstance(manager, urllib3.ProxyManager)
+ self.assertIsInstance(manager, urllib3.PoolManager)
+
+ def test_environment_no_proxy_ipv6_address_2(self):
+ import urllib3
+
+ config = ConfigDict()
+ self.overrideEnv("http_proxy", "http://myproxy:8080")
+ self.overrideEnv("no_proxy", "xyz,abc.def.gh,ff80:1::affe,ample.com")
+ base_url = "http://[ff80:1::affe]:1234/path/port"
+ manager = default_urllib3_manager(config=config, base_url=base_url)
+ self.assertNotIsInstance(manager, urllib3.ProxyManager)
+ self.assertIsInstance(manager, urllib3.PoolManager)
+
+ def test_environment_no_proxy_ipv6_address_3(self):
+ import urllib3
+
+ config = ConfigDict()
+ self.overrideEnv("http_proxy", "http://myproxy:8080")
+ self.overrideEnv(
+ "no_proxy", "xyz,abc.def.gh,192.168.0.0/24,ff80:1::/64,ample.com"
+ )
+ base_url = "http://[ff80:1::affe]/path/port"
+ manager = default_urllib3_manager(config=config, base_url=base_url)
+ self.assertNotIsInstance(manager, urllib3.ProxyManager)
+ self.assertIsInstance(manager, urllib3.PoolManager)
+
+ def test_config_proxy_custom_cls(self):
+ import urllib3
+
+ class CustomProxyManager(urllib3.ProxyManager):
+ pass
+
+ config = ConfigDict()
+ config.set(b"http", b"proxy", b"http://localhost:3128/")
+ manager = default_urllib3_manager(
+ config=config, proxy_manager_cls=CustomProxyManager
+ )
+ self.assertIsInstance(manager, CustomProxyManager)
+
+ def test_config_proxy_creds(self):
+ import urllib3
+
+ config = ConfigDict()
+ config.set(b"http", b"proxy", b"http://jelmer:example@localhost:3128/")
+ manager = default_urllib3_manager(config=config)
+ assert isinstance(manager, urllib3.ProxyManager)
+ self.assertEqual(
+ manager.proxy_headers, {"proxy-authorization": "Basic amVsbWVyOmV4YW1wbGU="}
+ )
+
+ def test_config_no_verify_ssl(self):
+ manager = default_urllib3_manager(config=None, cert_reqs="CERT_NONE")
+ self.assertEqual(manager.connection_pool_kw["cert_reqs"], "CERT_NONE")
+
+
class SubprocessSSHVendorTests(TestCase):
    """Tests for the ssh command lines built by SubprocessSSHVendor."""

    def setUp(self):
        # Replace subprocess.Popen inside dulwich.client with DummyPopen so
        # no real ssh process is ever spawned; DummyPopen records its args.
        self._orig_popen = dulwich.client.subprocess.Popen
        dulwich.client.subprocess.Popen = DummyPopen

    def tearDown(self):
        dulwich.client.subprocess.Popen = self._orig_popen

    def test_run_command_dashes(self):
        # Hostnames starting with a dash could be parsed as ssh options.
        self.assertRaises(
            StrangeHostname,
            SubprocessSSHVendor().run_command,
            "--weird-host",
            "git-clone-url",
        )

    def test_run_command_password(self):
        # Plain ssh offers no way to supply a password on the command line.
        self.assertRaises(
            NotImplementedError,
            SubprocessSSHVendor().run_command,
            "host",
            "git-clone-url",
            password="12345",
        )

    def test_run_command_password_and_privkey(self):
        # Even together with a key file, a password is still unsupported.
        self.assertRaises(
            NotImplementedError,
            SubprocessSSHVendor().run_command,
            "host",
            "git-clone-url",
            password="12345",
            key_filename="/tmp/id_rsa",
        )

    def test_run_command_with_port_username_and_privkey(self):
        expected = [
            "ssh",
            "-x",
            "-p",
            "2200",
            "-i",
            "/tmp/id_rsa",
            "user@host",
            "git-clone-url",
        ]

        command = SubprocessSSHVendor().run_command(
            "host",
            "git-clone-url",
            username="user",
            port="2200",
            key_filename="/tmp/id_rsa",
        )

        self.assertListEqual(expected, command.proc.args[0])

    def test_run_with_ssh_command(self):
        # A custom ssh_command is split into argv and used verbatim.
        expected = [
            "/path/to/ssh",
            "-o",
            "Option=Value",
            "-x",
            "host",
            "git-clone-url",
        ]

        command = SubprocessSSHVendor().run_command(
            "host",
            "git-clone-url",
            ssh_command="/path/to/ssh -o Option=Value",
        )

        self.assertListEqual(expected, command.proc.args[0])
+
+
class PLinkSSHVendorTests(TestCase):
    """Tests for the plink command lines built by PLinkSSHVendor."""

    def setUp(self):
        # Monkey-patch subprocess.Popen inside dulwich.client so no real
        # plink process is ever spawned; DummyPopen records its args.
        self._orig_popen = dulwich.client.subprocess.Popen
        dulwich.client.subprocess.Popen = DummyPopen

    def tearDown(self):
        dulwich.client.subprocess.Popen = self._orig_popen

    def test_run_command_dashes(self):
        # Hostnames starting with a dash could be parsed as plink options.
        vendor = PLinkSSHVendor()
        self.assertRaises(
            StrangeHostname,
            vendor.run_command,
            "--weird-host",
            "git-clone-url",
        )

    def test_run_command_password_and_privkey(self):
        vendor = PLinkSSHVendor()

        warnings.simplefilter("always", UserWarning)
        self.addCleanup(warnings.resetwarnings)
        warnings_list, restore_warnings = setup_warning_catcher()
        self.addCleanup(restore_warnings)

        command = vendor.run_command(
            "host",
            "git-clone-url",
            password="12345",
            key_filename="/tmp/id_rsa",
        )

        # A password is accepted but must trigger a warning, since it ends
        # up visible in the process list.
        expected_warning = UserWarning(
            "Invoking PLink with a password exposes the password in the "
            "process list."
        )

        for w in warnings_list:
            if type(w) is type(expected_warning) and w.args == expected_warning.args:
                break
        else:
            raise AssertionError(
                f"Expected warning {expected_warning!r} not in {warnings_list!r}"
            )

        args = command.proc.args

        if sys.platform == "win32":
            binary = ["plink.exe", "-ssh"]
        else:
            binary = ["plink", "-ssh"]
        expected = [
            *binary,
            "-pw",
            "12345",
            "-i",
            "/tmp/id_rsa",
            "host",
            "git-clone-url",
        ]
        self.assertListEqual(expected, args[0])

    def test_run_command_password(self):
        if sys.platform == "win32":
            binary = ["plink.exe", "-ssh"]
        else:
            binary = ["plink", "-ssh"]
        expected = [*binary, "-pw", "12345", "host", "git-clone-url"]

        vendor = PLinkSSHVendor()

        warnings.simplefilter("always", UserWarning)
        self.addCleanup(warnings.resetwarnings)
        warnings_list, restore_warnings = setup_warning_catcher()
        self.addCleanup(restore_warnings)

        command = vendor.run_command("host", "git-clone-url", password="12345")

        expected_warning = UserWarning(
            "Invoking PLink with a password exposes the password in the "
            "process list."
        )

        for w in warnings_list:
            if type(w) is type(expected_warning) and w.args == expected_warning.args:
                break
        else:
            raise AssertionError(
                f"Expected warning {expected_warning!r} not in {warnings_list!r}"
            )

        args = command.proc.args

        self.assertListEqual(expected, args[0])

    def test_run_command_with_port_username_and_privkey(self):
        if sys.platform == "win32":
            binary = ["plink.exe", "-ssh"]
        else:
            binary = ["plink", "-ssh"]
        expected = [
            *binary,
            "-P",
            "2200",
            "-i",
            "/tmp/id_rsa",
            "user@host",
            "git-clone-url",
        ]

        vendor = PLinkSSHVendor()
        command = vendor.run_command(
            "host",
            "git-clone-url",
            username="user",
            port="2200",
            key_filename="/tmp/id_rsa",
        )

        args = command.proc.args

        self.assertListEqual(expected, args[0])

    def test_run_with_ssh_command(self):
        # This test previously instantiated SubprocessSSHVendor by mistake
        # (copy-paste from SubprocessSSHVendorTests), so the PLink code path
        # was never exercised. Use PLinkSSHVendor and expect plink's "-ssh"
        # flag rather than ssh's "-x".
        expected = [
            "/path/to/plink",
            "-ssh",
            "host",
            "git-clone-url",
        ]

        vendor = PLinkSSHVendor()
        command = vendor.run_command(
            "host",
            "git-clone-url",
            ssh_command="/path/to/plink",
        )

        args = command.proc.args
        self.assertListEqual(expected, args[0])
+
+
class RsyncUrlTests(TestCase):
    """Tests for parsing rsync-style (scp-like) URLs."""

    def test_simple(self):
        self.assertEqual((None, "foo", "bar/path"), parse_rsync_url("foo:bar/path"))
        self.assertEqual(
            ("user", "foo", "bar/path"), parse_rsync_url("user@foo:bar/path")
        )

    def test_path(self):
        # A bare local path has no host part and is rejected.
        self.assertRaises(ValueError, parse_rsync_url, "/path")
+
+
class CheckWantsTests(TestCase):
    """Tests for check_wants, which validates requested shas against refs."""

    def test_fine(self):
        # A want matching an advertised ref value is accepted silently.
        refs = {b"refs/heads/blah": b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"}
        check_wants([b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"], refs)

    def test_missing(self):
        refs = {b"refs/heads/blah": b"3f3dc7a53fb752a6961d3a56683df46d4d3bf262"}
        self.assertRaises(
            InvalidWants,
            check_wants,
            [b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"],
            refs,
        )

    def test_annotated(self):
        # A peeled (^{}) ref value does not make its target a valid want.
        refs = {
            b"refs/heads/blah": b"3f3dc7a53fb752a6961d3a56683df46d4d3bf262",
            b"refs/heads/blah^{}": b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262",
        }
        self.assertRaises(
            InvalidWants,
            check_wants,
            [b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"],
            refs,
        )
+
+
class FetchPackResultTests(TestCase):
    """Tests for FetchPackResult equality semantics."""

    def test_eq(self):
        refs = {b"refs/heads/master": b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"}
        # Two results built from equal refs/symrefs/agent must compare equal.
        self.assertEqual(
            FetchPackResult(dict(refs), {}, b"user/agent"),
            FetchPackResult(dict(refs), {}, b"user/agent"),
        )
+
+
class GitCredentialStoreTests(TestCase):
    """Tests for reading entries from a git credential-store file."""

    @classmethod
    def setUpClass(cls):
        # One shared store file containing a single https entry.
        with tempfile.NamedTemporaryFile(delete=False) as f:
            f.write(b"https://user:pass@example.org\n")
        cls.fname = f.name

    @classmethod
    def tearDownClass(cls):
        os.unlink(cls.fname)

    def test_nonmatching_scheme(self):
        self.assertIsNone(
            get_credentials_from_store(b"http", b"example.org", fnames=[self.fname])
        )

    def test_nonmatching_hostname(self):
        self.assertIsNone(
            get_credentials_from_store(b"https", b"noentry.org", fnames=[self.fname])
        )

    def test_match_without_username(self):
        creds = get_credentials_from_store(
            b"https", b"example.org", fnames=[self.fname]
        )
        self.assertEqual((b"user", b"pass"), creds)

    def test_match_with_matching_username(self):
        creds = get_credentials_from_store(
            b"https", b"example.org", b"user", fnames=[self.fname]
        )
        self.assertEqual((b"user", b"pass"), creds)

    def test_no_match_with_nonmatching_username(self):
        self.assertIsNone(
            get_credentials_from_store(
                b"https", b"example.org", b"otheruser", fnames=[self.fname]
            )
        )
+
+
class RemoteErrorFromStderrTests(TestCase):
    """Tests for turning remote stderr output into exceptions."""

    def test_nothing(self):
        # No stderr stream at all yields a plain hangup.
        self.assertEqual(HangupException(), _remote_error_from_stderr(None))

    def test_error_line(self):
        # An ERROR: line is extracted as a GitProtocolError message.
        stderr = BytesIO(
            b"This is some random output.\n"
            b"ERROR: This is the actual error\n"
            b"with a tail\n"
        )
        self.assertEqual(
            GitProtocolError("This is the actual error"),
            _remote_error_from_stderr(stderr),
        )

    def test_no_error_line(self):
        # Without an ERROR: line, the raw lines are attached to the hangup.
        stderr = BytesIO(
            b"This is output without an error line.\n"
            b"And this line is just random noise, too.\n"
        )
        self.assertEqual(
            HangupException(
                [
                    b"This is output without an error line.",
                    b"And this line is just random noise, too.",
                ]
            ),
            _remote_error_from_stderr(stderr),
        )
blob - /dev/null
blob + dd985227bcf8c1e1c739ed63f615bc842478592b (mode 644)
--- /dev/null
+++ tests/test_config.py
+# test_config.py -- Tests for reading and writing configuration files
+# Copyright (C) 2011 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for reading and writing configuration files."""
+
+import os
+import sys
+from io import BytesIO
+from unittest import skipIf
+from unittest.mock import patch
+
+from dulwich.config import (
+ ConfigDict,
+ ConfigFile,
+ StackedConfig,
+ _check_section_name,
+ _check_variable_name,
+ _escape_value,
+ _format_string,
+ _parse_string,
+ apply_instead_of,
+ parse_submodules,
+)
+
+from . import TestCase
+
+
class ConfigFileTests(TestCase):
    """Tests for parsing and serializing git-style configuration files."""

    def from_file(self, text):
        # Helper: parse raw config bytes into a ConfigFile.
        return ConfigFile.from_file(BytesIO(text))

    def test_empty(self):
        ConfigFile()

    def test_eq(self):
        self.assertEqual(ConfigFile(), ConfigFile())

    def test_default_config(self):
        cf = self.from_file(
            b"""[core]
\trepositoryformatversion = 0
\tfilemode = true
\tbare = false
\tlogallrefupdates = true
"""
        )
        self.assertEqual(
            ConfigFile(
                {
                    (b"core",): {
                        b"repositoryformatversion": b"0",
                        b"filemode": b"true",
                        b"bare": b"false",
                        b"logallrefupdates": b"true",
                    }
                }
            ),
            cf,
        )

    def test_from_file_empty(self):
        cf = self.from_file(b"")
        self.assertEqual(ConfigFile(), cf)

    def test_empty_line_before_section(self):
        cf = self.from_file(b"\n[section]\n")
        self.assertEqual(ConfigFile({(b"section",): {}}), cf)

    def test_comment_before_section(self):
        cf = self.from_file(b"# foo\n[section]\n")
        self.assertEqual(ConfigFile({(b"section",): {}}), cf)

    def test_comment_after_section(self):
        cf = self.from_file(b"[section] # foo\n")
        self.assertEqual(ConfigFile({(b"section",): {}}), cf)

    def test_comment_after_variable(self):
        cf = self.from_file(b"[section]\nbar= foo # a comment\n")
        self.assertEqual(ConfigFile({(b"section",): {b"bar": b"foo"}}), cf)

    def test_comment_character_within_value_string(self):
        # '#' inside a quoted value is data, not a comment.
        cf = self.from_file(b'[section]\nbar= "foo#bar"\n')
        self.assertEqual(ConfigFile({(b"section",): {b"bar": b"foo#bar"}}), cf)

    def test_comment_character_within_section_string(self):
        cf = self.from_file(b'[branch "foo#bar"] # a comment\nbar= foo\n')
        self.assertEqual(ConfigFile({(b"branch", b"foo#bar"): {b"bar": b"foo"}}), cf)

    def test_closing_bracket_within_section_string(self):
        cf = self.from_file(b'[branch "foo]bar"] # a comment\nbar= foo\n')
        self.assertEqual(ConfigFile({(b"branch", b"foo]bar"): {b"bar": b"foo"}}), cf)

    def test_from_file_section(self):
        cf = self.from_file(b"[core]\nfoo = bar\n")
        self.assertEqual(b"bar", cf.get((b"core",), b"foo"))
        self.assertEqual(b"bar", cf.get((b"core", b"foo"), b"foo"))

    def test_from_file_multiple(self):
        # Repeated keys accumulate as a multivar, in file order.
        cf = self.from_file(b"[core]\nfoo = bar\nfoo = blah\n")
        self.assertEqual([b"bar", b"blah"], list(cf.get_multivar((b"core",), b"foo")))
        self.assertEqual([], list(cf.get_multivar((b"core",), b"blah")))

    def test_from_file_utf8_bom(self):
        # A leading UTF-8 BOM must be tolerated; the value stays UTF-8 bytes.
        text = "[core]\nfoo = b\u00e4r\n".encode("utf-8-sig")
        cf = self.from_file(text)
        self.assertEqual(b"b\xc3\xa4r", cf.get((b"core",), b"foo"))

    def test_from_file_section_case_insensitive_lower(self):
        cf = self.from_file(b"[cOre]\nfOo = bar\n")
        self.assertEqual(b"bar", cf.get((b"core",), b"foo"))
        self.assertEqual(b"bar", cf.get((b"core", b"foo"), b"foo"))

    def test_from_file_section_case_insensitive_mixed(self):
        cf = self.from_file(b"[cOre]\nfOo = bar\n")
        self.assertEqual(b"bar", cf.get((b"core",), b"fOo"))
        self.assertEqual(b"bar", cf.get((b"cOre", b"fOo"), b"fOo"))

    def test_from_file_with_mixed_quoted(self):
        # Quoted and unquoted fragments on one line are concatenated.
        cf = self.from_file(b'[core]\nfoo = "bar"la\n')
        self.assertEqual(b"barla", cf.get((b"core",), b"foo"))

    def test_from_file_section_with_open_brackets(self):
        self.assertRaises(ValueError, self.from_file, b"[core\nfoo = bar\n")

    def test_from_file_value_with_open_quoted(self):
        self.assertRaises(ValueError, self.from_file, b'[core]\nfoo = "bar\n')

    def test_from_file_with_quotes(self):
        # Quoting preserves leading whitespace in the value.
        cf = self.from_file(b"[core]\n" b'foo = " bar"\n')
        self.assertEqual(b" bar", cf.get((b"core",), b"foo"))

    def test_from_file_with_interrupted_line(self):
        # A trailing backslash continues the value on the next line.
        cf = self.from_file(b"[core]\n" b"foo = bar\\\n" b" la\n")
        self.assertEqual(b"barla", cf.get((b"core",), b"foo"))

    def test_from_file_with_boolean_setting(self):
        # A bare key with no value is shorthand for "true".
        cf = self.from_file(b"[core]\n" b"foo\n")
        self.assertEqual(b"true", cf.get((b"core",), b"foo"))

    def test_from_file_subsection(self):
        cf = self.from_file(b'[branch "foo"]\nfoo = bar\n')
        self.assertEqual(b"bar", cf.get((b"branch", b"foo"), b"foo"))

    def test_from_file_subsection_invalid(self):
        self.assertRaises(ValueError, self.from_file, b'[branch "foo]\nfoo = bar\n')

    def test_from_file_subsection_not_quoted(self):
        # Dotted section names parse the same as quoted subsections.
        cf = self.from_file(b"[branch.foo]\nfoo = bar\n")
        self.assertEqual(b"bar", cf.get((b"branch", b"foo"), b"foo"))

    def test_write_preserve_multivar(self):
        cf = self.from_file(b"[core]\nfoo = bar\nfoo = blah\n")
        f = BytesIO()
        cf.write_to_file(f)
        self.assertEqual(b"[core]\n\tfoo = bar\n\tfoo = blah\n", f.getvalue())

    def test_write_to_file_empty(self):
        c = ConfigFile()
        f = BytesIO()
        c.write_to_file(f)
        self.assertEqual(b"", f.getvalue())

    def test_write_to_file_section(self):
        c = ConfigFile()
        c.set((b"core",), b"foo", b"bar")
        f = BytesIO()
        c.write_to_file(f)
        self.assertEqual(b"[core]\n\tfoo = bar\n", f.getvalue())

    def test_write_to_file_subsection(self):
        c = ConfigFile()
        c.set((b"branch", b"blie"), b"foo", b"bar")
        f = BytesIO()
        c.write_to_file(f)
        self.assertEqual(b'[branch "blie"]\n\tfoo = bar\n', f.getvalue())

    def test_same_line(self):
        # Section header and first setting may share one line.
        cf = self.from_file(b"[branch.foo] foo = bar\n")
        self.assertEqual(b"bar", cf.get((b"branch", b"foo"), b"foo"))

    def test_quoted_newlines_windows(self):
        # CRLF line endings combined with backslash line continuations.
        cf = self.from_file(
            b"[alias]\r\n"
            b"c = '!f() { \\\r\n"
            b' printf \'[git commit -m \\"%s\\"]\\n\' \\"$*\\" && \\\r\n'
            b' git commit -m \\"$*\\"; \\\r\n'
            b" }; f'\r\n"
        )
        self.assertEqual(list(cf.sections()), [(b"alias",)])
        self.assertEqual(
            b"'!f() { printf '[git commit -m \"%s\"]\n' " b'"$*" && git commit -m "$*"',
            cf.get((b"alias",), b"c"),
        )

    def test_quoted(self):
        # Escaped quotes inside a value are unescaped on parse.
        cf = self.from_file(
            b"""[gui]
\tfontdiff = -family \\\"Ubuntu Mono\\\" -size 11 -overstrike 0
"""
        )
        self.assertEqual(
            ConfigFile(
                {
                    (b"gui",): {
                        b"fontdiff": b'-family "Ubuntu Mono" -size 11 -overstrike 0',
                    }
                }
            ),
            cf,
        )

    def test_quoted_multiline(self):
        cf = self.from_file(
            b"""[alias]
who = \"!who() {\\
 git log --no-merges --pretty=format:'%an - %ae' $@ | uniq -c | sort -rn;\\
};\\
who\"
"""
        )
        self.assertEqual(
            ConfigFile(
                {
                    (b"alias",): {
                        b"who": (
                            b"!who() {git log --no-merges --pretty=format:'%an - "
                            b"%ae' $@ | uniq -c | sort -rn;};who"
                        )
                    }
                }
            ),
            cf,
        )

    def test_set_hash_gets_quoted(self):
        # Values containing '#' must be quoted on write so they round-trip.
        c = ConfigFile()
        c.set(b"xandikos", b"color", b"#665544")
        f = BytesIO()
        c.write_to_file(f)
        self.assertEqual(b'[xandikos]\n\tcolor = "#665544"\n', f.getvalue())
+
+
class ConfigDictTests(TestCase):
    """Tests for the in-memory ConfigDict implementation."""

    def test_get_set(self):
        cd = ConfigDict()
        self.assertRaises(KeyError, cd.get, b"foo", b"core")
        cd.set((b"core",), b"foo", b"bla")
        self.assertEqual(b"bla", cd.get((b"core",), b"foo"))
        # Setting the same key again overwrites the previous value.
        cd.set((b"core",), b"foo", b"bloe")
        self.assertEqual(b"bloe", cd.get((b"core",), b"foo"))

    def test_get_boolean(self):
        cd = ConfigDict()
        cd.set((b"core",), b"foo", b"true")
        self.assertTrue(cd.get_boolean((b"core",), b"foo"))
        cd.set((b"core",), b"foo", b"false")
        self.assertFalse(cd.get_boolean((b"core",), b"foo"))
        # A value that is not a recognised boolean raises ValueError.
        cd.set((b"core",), b"foo", b"invalid")
        self.assertRaises(ValueError, cd.get_boolean, (b"core",), b"foo")

    def test_dict(self):
        cd = ConfigDict()
        cd.set((b"core",), b"foo", b"bla")
        cd.set((b"core2",), b"foo", b"bloe")

        self.assertEqual([(b"core",), (b"core2",)], list(cd.keys()))
        self.assertEqual(cd[(b"core",)], {b"foo": b"bla"})

        cd[b"a"] = b"b"
        self.assertEqual(cd[b"a"], b"b")

    def test_items(self):
        cd = ConfigDict()
        cd.set((b"core",), b"foo", b"bla")
        cd.set((b"core2",), b"foo", b"bloe")

        self.assertEqual([(b"foo", b"bla")], list(cd.items((b"core",))))

    def test_items_nonexistent(self):
        # Renamed from the misspelled "test_items_nonexistant"; behavior is
        # unchanged and the runner still discovers the test by prefix.
        cd = ConfigDict()
        cd.set((b"core2",), b"foo", b"bloe")

        self.assertEqual([], list(cd.items((b"core",))))

    def test_sections(self):
        cd = ConfigDict()
        cd.set((b"core2",), b"foo", b"bloe")

        self.assertEqual([(b"core2",)], list(cd.sections()))
+
+
class StackedConfigTests(TestCase):
    """Tests for StackedConfig and Windows config-path discovery."""

    def test_default_backends(self):
        StackedConfig.default_backends()

    @skipIf(sys.platform != "win32", "Windows specific config location.")
    def test_windows_config_from_path(self):
        # Absolute import: this module now lives in the top-level `tests`
        # package, so the old relative `from ..config import ...` no longer
        # resolves to dulwich.config (it points above the top-level package).
        from dulwich.config import get_win_system_paths

        install_dir = os.path.join("C:", "foo", "Git")
        self.overrideEnv("PATH", os.path.join(install_dir, "cmd"))
        with patch("os.path.exists", return_value=True):
            paths = set(get_win_system_paths())
        self.assertEqual(
            {
                os.path.join(os.environ.get("PROGRAMDATA"), "Git", "config"),
                os.path.join(install_dir, "etc", "gitconfig"),
            },
            paths,
        )

    @skipIf(sys.platform != "win32", "Windows specific config location.")
    def test_windows_config_from_reg(self):
        import winreg

        # See test_windows_config_from_path for why this is absolute.
        from dulwich.config import get_win_system_paths

        self.overrideEnv("PATH", None)
        install_dir = os.path.join("C:", "foo", "Git")
        with patch("winreg.OpenKey"):
            with patch(
                "winreg.QueryValueEx",
                return_value=(install_dir, winreg.REG_SZ),
            ):
                paths = set(get_win_system_paths())
        self.assertEqual(
            {
                os.path.join(os.environ.get("PROGRAMDATA"), "Git", "config"),
                os.path.join(install_dir, "etc", "gitconfig"),
            },
            paths,
        )
+
+
class EscapeValueTests(TestCase):
    """Tests for escaping config values before writing."""

    def test_nothing(self):
        # Plain text passes through untouched.
        self.assertEqual(b"foo", _escape_value(b"foo"))

    def test_backslash(self):
        # A literal backslash is doubled.
        escaped = _escape_value(b"foo\\")
        self.assertEqual(b"foo\\\\", escaped)

    def test_newline(self):
        # A newline becomes the two-character escape sequence \n.
        escaped = _escape_value(b"foo\n")
        self.assertEqual(b"foo\\n", escaped)
+
+
class FormatStringTests(TestCase):
    """Tests for deciding when values get quoted on serialization."""

    def test_quoted(self):
        # Leading whitespace forces quoting.
        self.assertEqual(b'" foo"', _format_string(b" foo"))
        self.assertEqual(b'"\\tfoo"', _format_string(b"\tfoo"))

    def test_not_quoted(self):
        # Ordinary text, including interior spaces, needs no quotes.
        for raw in (b"foo", b"foo bar"):
            self.assertEqual(raw, _format_string(raw))
+
+
class ParseStringTests(TestCase):
    """Tests for unquoting/unescaping config values."""

    def test_quoted(self):
        self.assertEqual(b" foo", _parse_string(b'" foo"'))
        self.assertEqual(b"\tfoo", _parse_string(b'"\\tfoo"'))

    def test_not_quoted(self):
        for raw in (b"foo", b"foo bar"):
            self.assertEqual(raw, _parse_string(raw))

    def test_nothing(self):
        self.assertEqual(b"", _parse_string(b""))

    def test_tab(self):
        self.assertEqual(b"\tbar\t", _parse_string(b"\\tbar\\t"))

    def test_newline(self):
        # The trailing raw (unescaped) tab is dropped; escapes are kept.
        self.assertEqual(b"\nbar\t", _parse_string(b"\\nbar\\t\t"))

    def test_quote(self):
        self.assertEqual(b'"foo"', _parse_string(b'\\"foo\\"'))
+
+
class CheckVariableNameTests(TestCase):
    """Tests for validation of config variable names."""

    def test_invalid(self):
        # Whitespace, commas and dots are not allowed in variable names.
        for name in (b"foo ", b"bar,bar", b"bar.bar"):
            self.assertFalse(_check_variable_name(name))

    def test_valid(self):
        # Letters of either case and dashes are fine.
        for name in (b"FOO", b"foo", b"foo-bar"):
            self.assertTrue(_check_variable_name(name))
+
+
class CheckSectionNameTests(TestCase):
    """Tests for validation of config section names."""

    def test_invalid(self):
        # Whitespace and commas are rejected in section names.
        for name in (b"foo ", b"bar,bar"):
            self.assertFalse(_check_section_name(name))

    def test_valid(self):
        # Unlike variable names, dots ARE permitted in section names.
        for name in (b"FOO", b"foo", b"foo-bar", b"bar.bar"):
            self.assertTrue(_check_section_name(name))
+
+
class SubmodulesTests(TestCase):
    """Tests for parsing .gitmodules-style submodule sections."""

    def testSubmodules(self):
        cf = ConfigFile.from_file(
            BytesIO(
                b'[submodule "core/lib"]\n'
                b"\tpath = core/lib\n"
                b"\turl = https://github.com/phhusson/QuasselC.git\n"
            )
        )
        expected = [
            (
                b"core/lib",
                b"https://github.com/phhusson/QuasselC.git",
                b"core/lib",
            )
        ]
        self.assertEqual(expected, list(parse_submodules(cf)))

    def testMalformedSubmodules(self):
        # The second entry lacks a `path`; only the complete one is yielded.
        cf = ConfigFile.from_file(
            BytesIO(
                b'[submodule "core/lib"]\n'
                b"\tpath = core/lib\n"
                b"\turl = https://github.com/phhusson/QuasselC.git\n"
                b"\n"
                b'[submodule "dulwich"]\n'
                b"\turl = https://github.com/jelmer/dulwich\n"
            )
        )
        expected = [
            (
                b"core/lib",
                b"https://github.com/phhusson/QuasselC.git",
                b"core/lib",
            )
        ]
        self.assertEqual(expected, list(parse_submodules(cf)))
+
+
class ApplyInsteadOfTests(TestCase):
    """Tests for url.<base>.insteadOf rewriting."""

    def test_none(self):
        # Without any insteadOf entries the URL comes back unchanged.
        cfg = ConfigDict()
        self.assertEqual(
            "https://example.com/", apply_instead_of(cfg, "https://example.com/")
        )

    def test_apply(self):
        cfg = ConfigDict()
        cfg.set(("url", "https://samba.org/"), "insteadOf", "https://example.com/")
        self.assertEqual(
            "https://samba.org/", apply_instead_of(cfg, "https://example.com/")
        )

    def test_apply_multiple(self):
        cfg = ConfigDict()
        cfg.set(("url", "https://samba.org/"), "insteadOf", "https://blah.com/")
        cfg.set(("url", "https://samba.org/"), "insteadOf", "https://example.com/")
        # Both values are retained as a multivar, in insertion order.
        self.assertEqual(
            [b"https://blah.com/", b"https://example.com/"],
            list(cfg.get_multivar(("url", "https://samba.org/"), "insteadOf")),
        )
        self.assertEqual(
            "https://samba.org/", apply_instead_of(cfg, "https://example.com/")
        )
blob - /dev/null
blob + a677d7d7939754a73bd9fa307799a3c74fa8edbf (mode 644)
--- /dev/null
+++ tests/test_credentials.py
+# test_credentials.py -- tests for credentials.py
+
+# Copyright (C) 2022 Daniele Trifirò <daniele@iterative.ai>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+from urllib.parse import urlparse
+
+from dulwich.config import ConfigDict
+from dulwich.credentials import (
+ match_partial_url,
+ match_urls,
+ urlmatch_credential_sections,
+)
+
+from . import TestCase
+
+
class TestCredentialHelpersUtils(TestCase):
    """Tests for URL matching helpers used by credential handling."""

    def test_match_urls(self):
        full = urlparse("https://github.com/jelmer/dulwich/")
        # Every ancestor of the URL is considered a match.
        for prefix in (
            "https://github.com/jelmer/dulwich",
            "https://github.com/jelmer",
            "https://github.com",
        ):
            self.assertTrue(match_urls(full, urlparse(prefix)))

        self.assertFalse(match_urls(full, urlparse("https://git.sr.ht/")))

    def test_match_partial_url(self):
        url = urlparse("https://github.com/jelmer/dulwich/")
        self.assertTrue(match_partial_url(url, "github.com"))
        self.assertFalse(match_partial_url(url, "github.com/jelmer/"))
        self.assertTrue(match_partial_url(url, "github.com/jelmer/dulwich"))
        # Partial path segments do not count as matches.
        self.assertFalse(match_partial_url(url, "github.com/jel"))
        self.assertFalse(match_partial_url(url, "github.com/jel/"))

    def test_urlmatch_credential_sections(self):
        cfg = ConfigDict()
        cfg.set((b"credential", "https://github.com"), b"helper", "foo")
        cfg.set((b"credential", "git.sr.ht"), b"helper", "foo")
        cfg.set(b"credential", b"helper", "bar")

        # URL-specific sections come first, the catch-all section last.
        self.assertEqual(
            [
                (b"credential", b"https://github.com"),
                (b"credential",),
            ],
            list(urlmatch_credential_sections(cfg, "https://github.com")),
        )

        self.assertEqual(
            [
                (b"credential", b"git.sr.ht"),
                (b"credential",),
            ],
            list(urlmatch_credential_sections(cfg, "https://git.sr.ht")),
        )

        self.assertEqual(
            [(b"credential",)],
            list(urlmatch_credential_sections(cfg, "missing_url")),
        )
blob - /dev/null
blob + 04c83d053ffecff9cf77799285e182bcfecd06bb (mode 644)
--- /dev/null
+++ tests/test_diff_tree.py
+# test_diff_tree.py -- Tests for file and tree diff utilities.
+# Copyright (C) 2010 Google, Inc.
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for file and tree diff utilities."""
+
+from itertools import permutations
+
+from dulwich.diff_tree import (
+ CHANGE_COPY,
+ CHANGE_MODIFY,
+ CHANGE_RENAME,
+ CHANGE_UNCHANGED,
+ RenameDetector,
+ TreeChange,
+ _count_blocks,
+ _count_blocks_py,
+ _is_tree,
+ _is_tree_py,
+ _merge_entries,
+ _merge_entries_py,
+ _similarity_score,
+ _tree_change_key,
+ tree_changes,
+ tree_changes_for_merge,
+)
+from dulwich.index import commit_tree
+from dulwich.object_store import MemoryObjectStore
+from dulwich.objects import Blob, ShaFile, Tree, TreeEntry
+
+from . import TestCase
+from .utils import F, ext_functest_builder, functest_builder, make_object
+
+
class DiffTestCase(TestCase):
    """Base class providing an in-memory object store and tree builder."""

    def setUp(self):
        super().setUp()
        self.store = MemoryObjectStore()
        # Many tests diff against an empty tree; build it once up front.
        self.empty_tree = self.commit_tree([])

    def commit_tree(self, entries):
        """Store a tree built from (path, obj) or (path, obj, mode) entries.

        Args:
          entries: iterable of 2-tuples (path, obj) — mode defaults to F —
            or 3-tuples (path, obj, mode); obj is a Blob to be stored, or a
            raw sha to reference directly.
        Returns: the stored Tree object.
        """
        tree_entries = []
        for entry in entries:
            if len(entry) == 2:
                path, obj = entry
                mode = F
            else:
                path, obj, mode = entry
            # Blobs are added to the store and referenced by id; anything
            # else is assumed to already be a sha.
            if isinstance(obj, Blob):
                self.store.add_object(obj)
                obj = obj.id
            tree_entries.append((path, obj, mode))
        return self.store[commit_tree(self.store, tree_entries)]
+
+
class TreeChangesTest(DiffTestCase):
    """Tests for tree_changes, _merge_entries, _is_tree and
    tree_changes_for_merge."""

    def setUp(self):
        super().setUp()
        self.detector = RenameDetector(self.store)

    def assertMergeFails(self, merge_entries, name, mode, sha):
        # Build a tree containing one invalid entry and check that merging
        # it raises rather than silently succeeding.
        t = Tree()
        t[name] = (mode, sha)
        self.assertRaises((TypeError, ValueError), merge_entries, "", t, t)

    def _do_test_merge_entries(self, merge_entries):
        # Shared body for the pure-Python and extension variants of
        # _merge_entries; `merge_entries` is the implementation under test.
        blob_a1 = make_object(Blob, data=b"a1")
        blob_a2 = make_object(Blob, data=b"a2")
        blob_b1 = make_object(Blob, data=b"b1")
        blob_c2 = make_object(Blob, data=b"c2")
        tree1 = self.commit_tree([(b"a", blob_a1, 0o100644), (b"b", blob_b1, 0o100755)])
        tree2 = self.commit_tree([(b"a", blob_a2, 0o100644), (b"c", blob_c2, 0o100755)])

        self.assertEqual([], merge_entries(b"", self.empty_tree, self.empty_tree))
        self.assertEqual(
            [
                ((None, None, None), (b"a", 0o100644, blob_a1.id)),
                ((None, None, None), (b"b", 0o100755, blob_b1.id)),
            ],
            merge_entries(b"", self.empty_tree, tree1),
        )
        # The first argument is a path prefix joined onto every entry name.
        self.assertEqual(
            [
                ((None, None, None), (b"x/a", 0o100644, blob_a1.id)),
                ((None, None, None), (b"x/b", 0o100755, blob_b1.id)),
            ],
            merge_entries(b"x", self.empty_tree, tree1),
        )

        self.assertEqual(
            [
                ((b"a", 0o100644, blob_a2.id), (None, None, None)),
                ((b"c", 0o100755, blob_c2.id), (None, None, None)),
            ],
            merge_entries(b"", tree2, self.empty_tree),
        )

        self.assertEqual(
            [
                ((b"a", 0o100644, blob_a1.id), (b"a", 0o100644, blob_a2.id)),
                ((b"b", 0o100755, blob_b1.id), (None, None, None)),
                ((None, None, None), (b"c", 0o100755, blob_c2.id)),
            ],
            merge_entries(b"", tree1, tree2),
        )

        self.assertEqual(
            [
                ((b"a", 0o100644, blob_a2.id), (b"a", 0o100644, blob_a1.id)),
                ((None, None, None), (b"b", 0o100755, blob_b1.id)),
                ((b"c", 0o100755, blob_c2.id), (None, None, None)),
            ],
            merge_entries(b"", tree2, tree1),
        )

        # Invalid name / mode / sha types must be rejected.
        self.assertMergeFails(merge_entries, 0xDEADBEEF, 0o100644, "1" * 40)
        self.assertMergeFails(merge_entries, b"a", b"deadbeef", "1" * 40)
        self.assertMergeFails(merge_entries, b"a", 0o100644, 0xDEADBEEF)

    test_merge_entries = functest_builder(_do_test_merge_entries, _merge_entries_py)
    test_merge_entries_extension = ext_functest_builder(
        _do_test_merge_entries, _merge_entries
    )

    def _do_test_is_tree(self, is_tree):
        # Shared body for both _is_tree implementations: only mode 0o040000
        # is a tree; non-int modes raise TypeError, non-entries AttributeError.
        self.assertFalse(is_tree(TreeEntry(None, None, None)))
        self.assertFalse(is_tree(TreeEntry(b"a", 0o100644, b"a" * 40)))
        self.assertFalse(is_tree(TreeEntry(b"a", 0o100755, b"a" * 40)))
        self.assertFalse(is_tree(TreeEntry(b"a", 0o120000, b"a" * 40)))
        self.assertTrue(is_tree(TreeEntry(b"a", 0o040000, b"a" * 40)))
        self.assertRaises(TypeError, is_tree, TreeEntry(b"a", b"x", b"a" * 40))
        self.assertRaises(AttributeError, is_tree, 1234)

    test_is_tree = functest_builder(_do_test_is_tree, _is_tree_py)
    test_is_tree_extension = ext_functest_builder(_do_test_is_tree, _is_tree)

    def assertChangesEqual(self, expected, tree1, tree2, **kwargs):
        actual = list(tree_changes(self.store, tree1.id, tree2.id, **kwargs))
        self.assertEqual(expected, actual)

    # For brevity, the following tests use tuples instead of TreeEntry objects.

    def test_tree_changes_empty(self):
        self.assertChangesEqual([], self.empty_tree, self.empty_tree)

    def test_tree_changes_no_changes(self):
        blob = make_object(Blob, data=b"blob")
        tree = self.commit_tree([(b"a", blob), (b"b/c", blob)])
        self.assertChangesEqual([], self.empty_tree, self.empty_tree)
        self.assertChangesEqual([], tree, tree)
        # want_unchanged=True reports identical entries as CHANGE_UNCHANGED.
        self.assertChangesEqual(
            [
                TreeChange(CHANGE_UNCHANGED, (b"a", F, blob.id), (b"a", F, blob.id)),
                TreeChange(
                    CHANGE_UNCHANGED,
                    (b"b/c", F, blob.id),
                    (b"b/c", F, blob.id),
                ),
            ],
            tree,
            tree,
            want_unchanged=True,
        )

    def test_tree_changes_add_delete(self):
        blob_a = make_object(Blob, data=b"a")
        blob_b = make_object(Blob, data=b"b")
        tree = self.commit_tree([(b"a", blob_a, 0o100644), (b"x/b", blob_b, 0o100755)])
        self.assertChangesEqual(
            [
                TreeChange.add((b"a", 0o100644, blob_a.id)),
                TreeChange.add((b"x/b", 0o100755, blob_b.id)),
            ],
            self.empty_tree,
            tree,
        )
        self.assertChangesEqual(
            [
                TreeChange.delete((b"a", 0o100644, blob_a.id)),
                TreeChange.delete((b"x/b", 0o100755, blob_b.id)),
            ],
            tree,
            self.empty_tree,
        )

    def test_tree_changes_modify_contents(self):
        blob_a1 = make_object(Blob, data=b"a1")
        blob_a2 = make_object(Blob, data=b"a2")
        tree1 = self.commit_tree([(b"a", blob_a1)])
        tree2 = self.commit_tree([(b"a", blob_a2)])
        self.assertChangesEqual(
            [TreeChange(CHANGE_MODIFY, (b"a", F, blob_a1.id), (b"a", F, blob_a2.id))],
            tree1,
            tree2,
        )

    def test_tree_changes_modify_mode(self):
        # A mode-only change (same sha) is still reported as a modify.
        blob_a = make_object(Blob, data=b"a")
        tree1 = self.commit_tree([(b"a", blob_a, 0o100644)])
        tree2 = self.commit_tree([(b"a", blob_a, 0o100755)])
        self.assertChangesEqual(
            [
                TreeChange(
                    CHANGE_MODIFY,
                    (b"a", 0o100644, blob_a.id),
                    (b"a", 0o100755, blob_a.id),
                )
            ],
            tree1,
            tree2,
        )

    def test_tree_changes_change_type(self):
        # Regular file -> symlink: reported as delete + add by default.
        blob_a1 = make_object(Blob, data=b"a")
        blob_a2 = make_object(Blob, data=b"/foo/bar")
        tree1 = self.commit_tree([(b"a", blob_a1, 0o100644)])
        tree2 = self.commit_tree([(b"a", blob_a2, 0o120000)])
        self.assertChangesEqual(
            [
                TreeChange.delete((b"a", 0o100644, blob_a1.id)),
                TreeChange.add((b"a", 0o120000, blob_a2.id)),
            ],
            tree1,
            tree2,
        )

    def test_tree_changes_change_type_same(self):
        # With change_type_same=True the same case collapses to one modify.
        blob_a1 = make_object(Blob, data=b"a")
        blob_a2 = make_object(Blob, data=b"/foo/bar")
        tree1 = self.commit_tree([(b"a", blob_a1, 0o100644)])
        tree2 = self.commit_tree([(b"a", blob_a2, 0o120000)])
        self.assertChangesEqual(
            [
                TreeChange(
                    CHANGE_MODIFY,
                    (b"a", 0o100644, blob_a1.id),
                    (b"a", 0o120000, blob_a2.id),
                )
            ],
            tree1,
            tree2,
            change_type_same=True,
        )

    def test_tree_changes_to_tree(self):
        # A file replaced by a directory of the same name.
        blob_a = make_object(Blob, data=b"a")
        blob_x = make_object(Blob, data=b"x")
        tree1 = self.commit_tree([(b"a", blob_a)])
        tree2 = self.commit_tree([(b"a/x", blob_x)])
        self.assertChangesEqual(
            [
                TreeChange.delete((b"a", F, blob_a.id)),
                TreeChange.add((b"a/x", F, blob_x.id)),
            ],
            tree1,
            tree2,
        )

    def test_tree_changes_complex(self):
        blob_a_1 = make_object(Blob, data=b"a1_1")
        blob_bx1_1 = make_object(Blob, data=b"bx1_1")
        blob_bx2_1 = make_object(Blob, data=b"bx2_1")
        blob_by1_1 = make_object(Blob, data=b"by1_1")
        blob_by2_1 = make_object(Blob, data=b"by2_1")
        tree1 = self.commit_tree(
            [
                (b"a", blob_a_1),
                (b"b/x/1", blob_bx1_1),
                (b"b/x/2", blob_bx2_1),
                (b"b/y/1", blob_by1_1),
                (b"b/y/2", blob_by2_1),
            ]
        )

        blob_a_2 = make_object(Blob, data=b"a1_2")
        blob_bx1_2 = blob_bx1_1
        blob_by_2 = make_object(Blob, data=b"by_2")
        blob_c_2 = make_object(Blob, data=b"c_2")
        tree2 = self.commit_tree(
            [
                (b"a", blob_a_2),
                (b"b/x/1", blob_bx1_2),
                (b"b/y", blob_by_2),
                (b"c", blob_c_2),
            ]
        )

        # Mix of modify, subtree deletion, and dir-replaced-by-file.
        self.assertChangesEqual(
            [
                TreeChange(
                    CHANGE_MODIFY,
                    (b"a", F, blob_a_1.id),
                    (b"a", F, blob_a_2.id),
                ),
                TreeChange.delete((b"b/x/2", F, blob_bx2_1.id)),
                TreeChange.add((b"b/y", F, blob_by_2.id)),
                TreeChange.delete((b"b/y/1", F, blob_by1_1.id)),
                TreeChange.delete((b"b/y/2", F, blob_by2_1.id)),
                TreeChange.add((b"c", F, blob_c_2.id)),
            ],
            tree1,
            tree2,
        )

    def test_tree_changes_name_order(self):
        blob = make_object(Blob, data=b"a")
        tree1 = self.commit_tree([(b"a", blob), (b"a.", blob), (b"a..", blob)])
        # Tree order is the reverse of this, so if we used tree order, 'a..'
        # would not be merged.
        tree2 = self.commit_tree([(b"a/x", blob), (b"a./x", blob), (b"a..", blob)])

        self.assertChangesEqual(
            [
                TreeChange.delete((b"a", F, blob.id)),
                TreeChange.add((b"a/x", F, blob.id)),
                TreeChange.delete((b"a.", F, blob.id)),
                TreeChange.add((b"a./x", F, blob.id)),
            ],
            tree1,
            tree2,
        )

    def test_tree_changes_prune(self):
        blob_a1 = make_object(Blob, data=b"a1")
        blob_a2 = make_object(Blob, data=b"a2")
        blob_x = make_object(Blob, data=b"x")
        tree1 = self.commit_tree([(b"a", blob_a1), (b"b/x", blob_x)])
        tree2 = self.commit_tree([(b"a", blob_a2), (b"b/x", blob_x)])
        # Remove identical items so lookups will fail unless we prune.
        subtree = self.store[tree1[b"b"][1]]
        for entry in subtree.items():
            del self.store[entry.sha]
        del self.store[subtree.id]

        self.assertChangesEqual(
            [TreeChange(CHANGE_MODIFY, (b"a", F, blob_a1.id), (b"a", F, blob_a2.id))],
            tree1,
            tree2,
        )

    def test_tree_changes_rename_detector(self):
        blob_a1 = make_object(Blob, data=b"a\nb\nc\nd\n")
        blob_a2 = make_object(Blob, data=b"a\nb\nc\ne\n")
        blob_b = make_object(Blob, data=b"b")
        tree1 = self.commit_tree([(b"a", blob_a1), (b"b", blob_b)])
        tree2 = self.commit_tree([(b"c", blob_a2), (b"b", blob_b)])
        detector = RenameDetector(self.store)

        # Without a detector: plain delete + add.
        self.assertChangesEqual(
            [
                TreeChange.delete((b"a", F, blob_a1.id)),
                TreeChange.add((b"c", F, blob_a2.id)),
            ],
            tree1,
            tree2,
        )
        self.assertChangesEqual(
            [
                TreeChange.delete((b"a", F, blob_a1.id)),
                TreeChange(
                    CHANGE_UNCHANGED,
                    (b"b", F, blob_b.id),
                    (b"b", F, blob_b.id),
                ),
                TreeChange.add((b"c", F, blob_a2.id)),
            ],
            tree1,
            tree2,
            want_unchanged=True,
        )
        # With a detector: the pair collapses into a rename.
        self.assertChangesEqual(
            [TreeChange(CHANGE_RENAME, (b"a", F, blob_a1.id), (b"c", F, blob_a2.id))],
            tree1,
            tree2,
            rename_detector=detector,
        )
        self.assertChangesEqual(
            [
                TreeChange(CHANGE_RENAME, (b"a", F, blob_a1.id), (b"c", F, blob_a2.id)),
                TreeChange(
                    CHANGE_UNCHANGED,
                    (b"b", F, blob_b.id),
                    (b"b", F, blob_b.id),
                ),
            ],
            tree1,
            tree2,
            rename_detector=detector,
            want_unchanged=True,
        )

    def assertChangesForMergeEqual(self, expected, parent_trees, merge_tree, **kwargs):
        parent_tree_ids = [t.id for t in parent_trees]
        actual = list(
            tree_changes_for_merge(self.store, parent_tree_ids, merge_tree.id, **kwargs)
        )
        self.assertEqual(expected, actual)

        # The result must be symmetric: reversing the parent order reverses
        # each per-parent change list.
        parent_tree_ids.reverse()
        expected = [list(reversed(cs)) for cs in expected]
        actual = list(
            tree_changes_for_merge(self.store, parent_tree_ids, merge_tree.id, **kwargs)
        )
        self.assertEqual(expected, actual)

    def test_tree_changes_for_merge_add_no_conflict(self):
        blob = make_object(Blob, data=b"blob")
        parent1 = self.commit_tree([])
        parent2 = merge = self.commit_tree([(b"a", blob)])
        self.assertChangesForMergeEqual([], [parent1, parent2], merge)
        self.assertChangesForMergeEqual([], [parent2, parent2], merge)

    def test_tree_changes_for_merge_add_modify_conflict(self):
        blob1 = make_object(Blob, data=b"1")
        blob2 = make_object(Blob, data=b"2")
        parent1 = self.commit_tree([])
        parent2 = self.commit_tree([(b"a", blob1)])
        merge = self.commit_tree([(b"a", blob2)])
        self.assertChangesForMergeEqual(
            [
                [
                    TreeChange.add((b"a", F, blob2.id)),
                    TreeChange(CHANGE_MODIFY, (b"a", F, blob1.id), (b"a", F, blob2.id)),
                ]
            ],
            [parent1, parent2],
            merge,
        )

    def test_tree_changes_for_merge_modify_modify_conflict(self):
        blob1 = make_object(Blob, data=b"1")
        blob2 = make_object(Blob, data=b"2")
        blob3 = make_object(Blob, data=b"3")
        parent1 = self.commit_tree([(b"a", blob1)])
        parent2 = self.commit_tree([(b"a", blob2)])
        merge = self.commit_tree([(b"a", blob3)])
        self.assertChangesForMergeEqual(
            [
                [
                    TreeChange(CHANGE_MODIFY, (b"a", F, blob1.id), (b"a", F, blob3.id)),
                    TreeChange(CHANGE_MODIFY, (b"a", F, blob2.id), (b"a", F, blob3.id)),
                ]
            ],
            [parent1, parent2],
            merge,
        )

    def test_tree_changes_for_merge_modify_no_conflict(self):
        # Merge result identical to one parent: nothing to report.
        blob1 = make_object(Blob, data=b"1")
        blob2 = make_object(Blob, data=b"2")
        parent1 = self.commit_tree([(b"a", blob1)])
        parent2 = merge = self.commit_tree([(b"a", blob2)])
        self.assertChangesForMergeEqual([], [parent1, parent2], merge)

    def test_tree_changes_for_merge_delete_delete_conflict(self):
        blob1 = make_object(Blob, data=b"1")
        blob2 = make_object(Blob, data=b"2")
        parent1 = self.commit_tree([(b"a", blob1)])
        parent2 = self.commit_tree([(b"a", blob2)])
        merge = self.commit_tree([])
        self.assertChangesForMergeEqual(
            [
                [
                    TreeChange.delete((b"a", F, blob1.id)),
                    TreeChange.delete((b"a", F, blob2.id)),
                ]
            ],
            [parent1, parent2],
            merge,
        )

    def test_tree_changes_for_merge_delete_no_conflict(self):
        blob = make_object(Blob, data=b"blob")
        has = self.commit_tree([(b"a", blob)])
        doesnt_have = self.commit_tree([])
        self.assertChangesForMergeEqual([], [has, has], doesnt_have)
        self.assertChangesForMergeEqual([], [has, doesnt_have], doesnt_have)

    def test_tree_changes_for_merge_octopus_no_conflict(self):
        r = list(range(5))
        # NOTE(review): bytes(i) yields i NUL bytes (not b"<i>"); the blobs
        # are still pairwise distinct, which is all this test needs — confirm
        # that was the intent.
        blobs = [make_object(Blob, data=bytes(i)) for i in r]
        parents = [self.commit_tree([(b"a", blobs[i])]) for i in r]
        for i in r:
            # Take the SHA from each of the parents.
            self.assertChangesForMergeEqual([], parents, parents[i])

    def test_tree_changes_for_merge_octopus_modify_conflict(self):
        # Because the octopus merge strategy is limited, I doubt it's possible
        # to create this with the git command line. But the output is well-
        # defined, so test it anyway.
        r = list(range(5))
        parent_blobs = [make_object(Blob, data=bytes(i)) for i in r]
        merge_blob = make_object(Blob, data=b"merge")
        parents = [self.commit_tree([(b"a", parent_blobs[i])]) for i in r]
        merge = self.commit_tree([(b"a", merge_blob)])
        expected = [
            [
                TreeChange(
                    CHANGE_MODIFY,
                    (b"a", F, parent_blobs[i].id),
                    (b"a", F, merge_blob.id),
                )
                for i in r
            ]
        ]
        self.assertChangesForMergeEqual(expected, parents, merge)

    def test_tree_changes_for_merge_octopus_delete(self):
        blob1 = make_object(Blob, data=b"1")
        blob2 = make_object(Blob, data=b"3")
        parent1 = self.commit_tree([(b"a", blob1)])
        parent2 = self.commit_tree([(b"a", blob2)])
        parent3 = merge = self.commit_tree([])
        self.assertChangesForMergeEqual([], [parent1, parent1, parent1], merge)
        self.assertChangesForMergeEqual([], [parent1, parent1, parent3], merge)
        self.assertChangesForMergeEqual([], [parent1, parent3, parent3], merge)
        # A parent that already lacked the path contributes None to the
        # per-parent change list.
        self.assertChangesForMergeEqual(
            [
                [
                    TreeChange.delete((b"a", F, blob1.id)),
                    TreeChange.delete((b"a", F, blob2.id)),
                    None,
                ]
            ],
            [parent1, parent2, parent3],
            merge,
        )

    def test_tree_changes_for_merge_add_add_same_conflict(self):
        blob = make_object(Blob, data=b"a\nb\nc\nd\n")
        parent1 = self.commit_tree([(b"a", blob)])
        parent2 = self.commit_tree([])
        merge = self.commit_tree([(b"b", blob)])
        add = TreeChange.add((b"b", F, blob.id))
        self.assertChangesForMergeEqual([[add, add]], [parent1, parent2], merge)

    def test_tree_changes_for_merge_add_exact_rename_conflict(self):
        blob = make_object(Blob, data=b"a\nb\nc\nd\n")
        parent1 = self.commit_tree([(b"a", blob)])
        parent2 = self.commit_tree([])
        merge = self.commit_tree([(b"b", blob)])
        self.assertChangesForMergeEqual(
            [
                [
                    TreeChange(CHANGE_RENAME, (b"a", F, blob.id), (b"b", F, blob.id)),
                    TreeChange.add((b"b", F, blob.id)),
                ]
            ],
            [parent1, parent2],
            merge,
            rename_detector=self.detector,
        )

    def test_tree_changes_for_merge_add_content_rename_conflict(self):
        blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
        blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
        parent1 = self.commit_tree([(b"a", blob1)])
        parent2 = self.commit_tree([])
        merge = self.commit_tree([(b"b", blob2)])
        self.assertChangesForMergeEqual(
            [
                [
                    TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob2.id)),
                    TreeChange.add((b"b", F, blob2.id)),
                ]
            ],
            [parent1, parent2],
            merge,
            rename_detector=self.detector,
        )

    def test_tree_changes_for_merge_modify_rename_conflict(self):
        blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
        blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
        parent1 = self.commit_tree([(b"a", blob1)])
        parent2 = self.commit_tree([(b"b", blob1)])
        merge = self.commit_tree([(b"b", blob2)])
        self.assertChangesForMergeEqual(
            [
                [
                    TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob2.id)),
                    TreeChange(CHANGE_MODIFY, (b"b", F, blob1.id), (b"b", F, blob2.id)),
                ]
            ],
            [parent1, parent2],
            merge,
            rename_detector=self.detector,
        )
+
+
+class RenameDetectionTest(DiffTestCase):
    def _do_test_count_blocks(self, count_blocks):
        # Each block hash is weighted by the total byte length of the lines
        # hashing to it: b"a\n" occurs twice -> weight 4.
        blob = make_object(Blob, data=b"a\nb\na\n")
        self.assertBlockCountEqual({b"a\n": 4, b"b\n": 2}, count_blocks(blob))

    test_count_blocks = functest_builder(_do_test_count_blocks, _count_blocks_py)
    test_count_blocks_extension = ext_functest_builder(
        _do_test_count_blocks, _count_blocks
    )
+
+ def _do_test_count_blocks_no_newline(self, count_blocks):
+ blob = make_object(Blob, data=b"a\na")
+ self.assertBlockCountEqual({b"a\n": 2, b"a": 1}, _count_blocks(blob))
+
+ test_count_blocks_no_newline = functest_builder(
+ _do_test_count_blocks_no_newline, _count_blocks_py
+ )
+ test_count_blocks_no_newline_extension = ext_functest_builder(
+ _do_test_count_blocks_no_newline, _count_blocks
+ )
+
+ def assertBlockCountEqual(self, expected, got):
+ self.assertEqual(
+ {(hash(l) & 0xFFFFFFFF): c for (l, c) in expected.items()},
+ {(h & 0xFFFFFFFF): c for (h, c) in got.items()},
+ )
+
+ def _do_test_count_blocks_chunks(self, count_blocks):
+ blob = ShaFile.from_raw_chunks(Blob.type_num, [b"a\nb", b"\na\n"])
+ self.assertBlockCountEqual({b"a\n": 4, b"b\n": 2}, _count_blocks(blob))
+
+ test_count_blocks_chunks = functest_builder(
+ _do_test_count_blocks_chunks, _count_blocks_py
+ )
+ test_count_blocks_chunks_extension = ext_functest_builder(
+ _do_test_count_blocks_chunks, _count_blocks
+ )
+
+ def _do_test_count_blocks_long_lines(self, count_blocks):
+ a = b"a" * 64
+ data = a + b"xxx\ny\n" + a + b"zzz\n"
+ blob = make_object(Blob, data=data)
+ self.assertBlockCountEqual(
+ {b"a" * 64: 128, b"xxx\n": 4, b"y\n": 2, b"zzz\n": 4},
+ _count_blocks(blob),
+ )
+
+ test_count_blocks_long_lines = functest_builder(
+ _do_test_count_blocks_long_lines, _count_blocks_py
+ )
+ test_count_blocks_long_lines_extension = ext_functest_builder(
+ _do_test_count_blocks_long_lines, _count_blocks
+ )
+
+ def assertSimilar(self, expected_score, blob1, blob2):
+ self.assertEqual(expected_score, _similarity_score(blob1, blob2))
+ self.assertEqual(expected_score, _similarity_score(blob2, blob1))
+
    def test_similarity_score(self):
        # Scores are integer percentages of shared content; an empty blob
        # only matches itself (100) and scores 0 against anything else.
        blob0 = make_object(Blob, data=b"")
        blob1 = make_object(Blob, data=b"ab\ncd\ncd\n")
        blob2 = make_object(Blob, data=b"ab\n")
        blob3 = make_object(Blob, data=b"cd\n")
        blob4 = make_object(Blob, data=b"cd\ncd\n")

        self.assertSimilar(100, blob0, blob0)
        self.assertSimilar(0, blob0, blob1)
        self.assertSimilar(33, blob1, blob2)
        self.assertSimilar(33, blob1, blob3)
        self.assertSimilar(66, blob1, blob4)
        self.assertSimilar(0, blob2, blob3)
        self.assertSimilar(50, blob3, blob4)
+
    def test_similarity_score_cache(self):
        blob1 = make_object(Blob, data=b"ab\ncd\n")
        blob2 = make_object(Blob, data=b"ab\n")

        # First call populates block_cache keyed by blob id.
        block_cache = {}
        self.assertEqual(50, _similarity_score(blob1, blob2, block_cache=block_cache))
        self.assertEqual({blob1.id, blob2.id}, set(block_cache))

        def fail_chunks():
            self.fail("Unexpected call to as_raw_chunks()")

        # With the cache warm, a second call must not re-read blob contents;
        # stub as_raw_chunks to fail loudly if it does.
        blob1.as_raw_chunks = blob2.as_raw_chunks = fail_chunks
        blob1.raw_length = lambda: 6
        blob2.raw_length = lambda: 3
        self.assertEqual(50, _similarity_score(blob1, blob2, block_cache=block_cache))
+
    def test_tree_entry_sort(self):
        # Every permutation of the entries must sort back into this order
        # under _tree_change_key (keyed on paths, per the expected list).
        # NOTE(review): sha here is a str while the rest of the file uses
        # bytes shas; sorting only inspects paths so it works — confirm
        # whether b"abcd" * 10 was intended.
        sha = "abcd" * 10
        expected_entries = [
            TreeChange.add(TreeEntry(b"aaa", F, sha)),
            TreeChange(
                CHANGE_COPY,
                TreeEntry(b"bbb", F, sha),
                TreeEntry(b"aab", F, sha),
            ),
            TreeChange(
                CHANGE_MODIFY,
                TreeEntry(b"bbb", F, sha),
                TreeEntry(b"bbb", F, b"dabc" * 10),
            ),
            TreeChange(
                CHANGE_RENAME,
                TreeEntry(b"bbc", F, sha),
                TreeEntry(b"ddd", F, sha),
            ),
            TreeChange.delete(TreeEntry(b"ccc", F, sha)),
        ]

        for perm in permutations(expected_entries):
            self.assertEqual(expected_entries, sorted(perm, key=_tree_change_key))
+
    def detect_renames(self, tree1, tree2, want_unchanged=False, **kwargs):
        # Helper: build a fresh RenameDetector with the given tuning kwargs
        # and return the change list between the two trees.
        detector = RenameDetector(self.store, **kwargs)
        return detector.changes_with_renames(
            tree1.id, tree2.id, want_unchanged=want_unchanged
        )
+
    def test_no_renames(self):
        # Blobs below the similarity threshold stay a plain modify.
        blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
        blob2 = make_object(Blob, data=b"a\nb\ne\nf\n")
        blob3 = make_object(Blob, data=b"a\nb\ng\nh\n")
        tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
        tree2 = self.commit_tree([(b"a", blob1), (b"b", blob3)])
        self.assertEqual(
            [TreeChange(CHANGE_MODIFY, (b"b", F, blob2.id), (b"b", F, blob3.id))],
            self.detect_renames(tree1, tree2),
        )
+
    def test_exact_rename_one_to_one(self):
        # Identical shas under new names are detected as exact renames.
        blob1 = make_object(Blob, data=b"1")
        blob2 = make_object(Blob, data=b"2")
        tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
        tree2 = self.commit_tree([(b"c", blob1), (b"d", blob2)])
        self.assertEqual(
            [
                TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"c", F, blob1.id)),
                TreeChange(CHANGE_RENAME, (b"b", F, blob2.id), (b"d", F, blob2.id)),
            ],
            self.detect_renames(tree1, tree2),
        )
+
    def test_exact_rename_split_different_type(self):
        # Same path and sha but file->symlink: split into add + delete,
        # not treated as a rename.
        blob = make_object(Blob, data=b"/foo")
        tree1 = self.commit_tree([(b"a", blob, 0o100644)])
        tree2 = self.commit_tree([(b"a", blob, 0o120000)])
        self.assertEqual(
            [
                TreeChange.add((b"a", 0o120000, blob.id)),
                TreeChange.delete((b"a", 0o100644, blob.id)),
            ],
            self.detect_renames(tree1, tree2),
        )
+
    def test_exact_rename_and_different_type(self):
        # A type change at the old path coexists with a rename of the blob.
        blob1 = make_object(Blob, data=b"1")
        blob2 = make_object(Blob, data=b"2")
        tree1 = self.commit_tree([(b"a", blob1)])
        tree2 = self.commit_tree([(b"a", blob2, 0o120000), (b"b", blob1)])
        self.assertEqual(
            [
                TreeChange.add((b"a", 0o120000, blob2.id)),
                TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob1.id)),
            ],
            self.detect_renames(tree1, tree2),
        )
+
    def test_exact_rename_one_to_many(self):
        # One source, two targets: first becomes the rename, the rest copies.
        blob = make_object(Blob, data=b"1")
        tree1 = self.commit_tree([(b"a", blob)])
        tree2 = self.commit_tree([(b"b", blob), (b"c", blob)])
        self.assertEqual(
            [
                TreeChange(CHANGE_RENAME, (b"a", F, blob.id), (b"b", F, blob.id)),
                TreeChange(CHANGE_COPY, (b"a", F, blob.id), (b"c", F, blob.id)),
            ],
            self.detect_renames(tree1, tree2),
        )
+
    def test_exact_rename_many_to_one(self):
        # Two identical sources, one target: one rename, one plain delete.
        blob = make_object(Blob, data=b"1")
        tree1 = self.commit_tree([(b"a", blob), (b"b", blob)])
        tree2 = self.commit_tree([(b"c", blob)])
        self.assertEqual(
            [
                TreeChange(CHANGE_RENAME, (b"a", F, blob.id), (b"c", F, blob.id)),
                TreeChange.delete((b"b", F, blob.id)),
            ],
            self.detect_renames(tree1, tree2),
        )
+
    def test_exact_rename_many_to_many(self):
        # Sources are paired with targets in order; the surplus target
        # becomes a copy.
        blob = make_object(Blob, data=b"1")
        tree1 = self.commit_tree([(b"a", blob), (b"b", blob)])
        tree2 = self.commit_tree([(b"c", blob), (b"d", blob), (b"e", blob)])
        self.assertEqual(
            [
                TreeChange(CHANGE_RENAME, (b"a", F, blob.id), (b"c", F, blob.id)),
                TreeChange(CHANGE_COPY, (b"a", F, blob.id), (b"e", F, blob.id)),
                TreeChange(CHANGE_RENAME, (b"b", F, blob.id), (b"d", F, blob.id)),
            ],
            self.detect_renames(tree1, tree2),
        )
+
    def test_exact_copy_modify(self):
        # The old content reappears under a new name while the original
        # path is modified: modify + copy.
        blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
        blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
        tree1 = self.commit_tree([(b"a", blob1)])
        tree2 = self.commit_tree([(b"a", blob2), (b"b", blob1)])
        self.assertEqual(
            [
                TreeChange(CHANGE_MODIFY, (b"a", F, blob1.id), (b"a", F, blob2.id)),
                TreeChange(CHANGE_COPY, (b"a", F, blob1.id), (b"b", F, blob1.id)),
            ],
            self.detect_renames(tree1, tree2),
        )
+
    def test_exact_copy_change_mode(self):
        # A mode-only change at the old path still allows a copy detection.
        blob = make_object(Blob, data=b"a\nb\nc\nd\n")
        tree1 = self.commit_tree([(b"a", blob)])
        tree2 = self.commit_tree([(b"a", blob, 0o100755), (b"b", blob)])
        self.assertEqual(
            [
                TreeChange(
                    CHANGE_MODIFY,
                    (b"a", F, blob.id),
                    (b"a", 0o100755, blob.id),
                ),
                TreeChange(CHANGE_COPY, (b"a", F, blob.id), (b"b", F, blob.id)),
            ],
            self.detect_renames(tree1, tree2),
        )
+
    def test_rename_threshold(self):
        # 2/3 lines shared: a rename at threshold 50, delete+add at 75.
        blob1 = make_object(Blob, data=b"a\nb\nc\n")
        blob2 = make_object(Blob, data=b"a\nb\nd\n")
        tree1 = self.commit_tree([(b"a", blob1)])
        tree2 = self.commit_tree([(b"b", blob2)])
        self.assertEqual(
            [TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob2.id))],
            self.detect_renames(tree1, tree2, rename_threshold=50),
        )
        self.assertEqual(
            [
                TreeChange.delete((b"a", F, blob1.id)),
                TreeChange.add((b"b", F, blob2.id)),
            ],
            self.detect_renames(tree1, tree2, rename_threshold=75),
        )
+
    def test_content_rename_max_files(self):
        # With max_files=1 content-rename detection is skipped entirely and
        # plain adds/deletes are reported.
        blob1 = make_object(Blob, data=b"a\nb\nc\nd")
        blob4 = make_object(Blob, data=b"a\nb\nc\ne\n")
        blob2 = make_object(Blob, data=b"e\nf\ng\nh\n")
        blob3 = make_object(Blob, data=b"e\nf\ng\ni\n")
        tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
        tree2 = self.commit_tree([(b"c", blob3), (b"d", blob4)])
        self.assertEqual(
            [
                TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"d", F, blob4.id)),
                TreeChange(CHANGE_RENAME, (b"b", F, blob2.id), (b"c", F, blob3.id)),
            ],
            self.detect_renames(tree1, tree2),
        )
        self.assertEqual(
            [
                TreeChange.delete((b"a", F, blob1.id)),
                TreeChange.delete((b"b", F, blob2.id)),
                TreeChange.add((b"c", F, blob3.id)),
                TreeChange.add((b"d", F, blob4.id)),
            ],
            self.detect_renames(tree1, tree2, max_files=1),
        )
+
    def test_content_rename_one_to_one(self):
        # Similar-but-not-identical blobs pair up as content renames.
        b11 = make_object(Blob, data=b"a\nb\nc\nd\n")
        b12 = make_object(Blob, data=b"a\nb\nc\ne\n")
        b21 = make_object(Blob, data=b"e\nf\ng\n\nh")
        b22 = make_object(Blob, data=b"e\nf\ng\n\ni")
        tree1 = self.commit_tree([(b"a", b11), (b"b", b21)])
        tree2 = self.commit_tree([(b"c", b12), (b"d", b22)])
        self.assertEqual(
            [
                TreeChange(CHANGE_RENAME, (b"a", F, b11.id), (b"c", F, b12.id)),
                TreeChange(CHANGE_RENAME, (b"b", F, b21.id), (b"d", F, b22.id)),
            ],
            self.detect_renames(tree1, tree2),
        )
+
    def test_content_rename_one_to_one_ordering(self):
        # The higher-scoring source wins the rename regardless of whether
        # it sorts before or after the loser.
        blob1 = make_object(Blob, data=b"a\nb\nc\nd\ne\nf\n")
        blob2 = make_object(Blob, data=b"a\nb\nc\nd\ng\nh\n")
        # 6/10 match to blob1, 8/10 match to blob2
        blob3 = make_object(Blob, data=b"a\nb\nc\nd\ng\ni\n")
        tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
        tree2 = self.commit_tree([(b"c", blob3)])
        self.assertEqual(
            [
                TreeChange.delete((b"a", F, blob1.id)),
                TreeChange(CHANGE_RENAME, (b"b", F, blob2.id), (b"c", F, blob3.id)),
            ],
            self.detect_renames(tree1, tree2),
        )

        tree3 = self.commit_tree([(b"a", blob2), (b"b", blob1)])
        tree4 = self.commit_tree([(b"c", blob3)])
        self.assertEqual(
            [
                TreeChange(CHANGE_RENAME, (b"a", F, blob2.id), (b"c", F, blob3.id)),
                TreeChange.delete((b"b", F, blob1.id)),
            ],
            self.detect_renames(tree3, tree4),
        )
+
    def test_content_rename_one_to_many(self):
        # The best-scoring target gets the rename; the other becomes a copy.
        blob1 = make_object(Blob, data=b"aa\nb\nc\nd\ne\n")
        blob2 = make_object(Blob, data=b"ab\nb\nc\nd\ne\n")  # 8/11 match
        blob3 = make_object(Blob, data=b"aa\nb\nc\nd\nf\n")  # 9/11 match
        tree1 = self.commit_tree([(b"a", blob1)])
        tree2 = self.commit_tree([(b"b", blob2), (b"c", blob3)])
        self.assertEqual(
            [
                TreeChange(CHANGE_COPY, (b"a", F, blob1.id), (b"b", F, blob2.id)),
                TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"c", F, blob3.id)),
            ],
            self.detect_renames(tree1, tree2),
        )
+
    def test_content_rename_many_to_one(self):
        # Only one source can claim the target; the other is deleted.
        blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
        blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
        blob3 = make_object(Blob, data=b"a\nb\nc\nf\n")
        tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
        tree2 = self.commit_tree([(b"c", blob3)])
        self.assertEqual(
            [
                TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"c", F, blob3.id)),
                TreeChange.delete((b"b", F, blob2.id)),
            ],
            self.detect_renames(tree1, tree2),
        )
+
    def test_content_rename_many_to_many(self):
        blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
        blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
        blob3 = make_object(Blob, data=b"a\nb\nc\nf\n")
        blob4 = make_object(Blob, data=b"a\nb\nc\ng\n")
        tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
        tree2 = self.commit_tree([(b"c", blob3), (b"d", blob4)])
        # TODO(dborowitz): Distribute renames rather than greedily choosing
        # copies.
        self.assertEqual(
            [
                TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"c", F, blob3.id)),
                TreeChange(CHANGE_COPY, (b"a", F, blob1.id), (b"d", F, blob4.id)),
                TreeChange.delete((b"b", F, blob2.id)),
            ],
            self.detect_renames(tree1, tree2),
        )
+
    def test_content_rename_with_more_deletions(self):
        # Four identical sources, three targets: pair in order, surplus
        # source is a delete.
        blob1 = make_object(Blob, data=b"")
        tree1 = self.commit_tree(
            [(b"a", blob1), (b"b", blob1), (b"c", blob1), (b"d", blob1)]
        )
        tree2 = self.commit_tree([(b"e", blob1), (b"f", blob1), (b"g", blob1)])
        self.maxDiff = None
        self.assertEqual(
            [
                TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"e", F, blob1.id)),
                TreeChange(CHANGE_RENAME, (b"b", F, blob1.id), (b"f", F, blob1.id)),
                TreeChange(CHANGE_RENAME, (b"c", F, blob1.id), (b"g", F, blob1.id)),
                TreeChange.delete((b"d", F, blob1.id)),
            ],
            self.detect_renames(tree1, tree2),
        )
+
    def test_content_rename_gitlink(self):
        # Submodule (gitlink, mode 0o160000) entries are excluded from
        # content-rename detection: everything stays add/delete.
        blob1 = make_object(Blob, data=b"blob1")
        blob2 = make_object(Blob, data=b"blob2")
        link1 = b"1" * 40
        link2 = b"2" * 40
        tree1 = self.commit_tree([(b"a", blob1), (b"b", link1, 0o160000)])
        tree2 = self.commit_tree([(b"c", blob2), (b"d", link2, 0o160000)])
        self.assertEqual(
            [
                TreeChange.delete((b"a", 0o100644, blob1.id)),
                TreeChange.delete((b"b", 0o160000, link1)),
                TreeChange.add((b"c", 0o100644, blob2.id)),
                TreeChange.add((b"d", 0o160000, link2)),
            ],
            self.detect_renames(tree1, tree2),
        )
+
    def test_exact_rename_swap(self):
        # Swapped contents look like two modifies unless rewrites may be
        # broken up (rewrite_threshold), in which case they become renames.
        blob1 = make_object(Blob, data=b"1")
        blob2 = make_object(Blob, data=b"2")
        tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
        tree2 = self.commit_tree([(b"a", blob2), (b"b", blob1)])
        self.assertEqual(
            [
                TreeChange(CHANGE_MODIFY, (b"a", F, blob1.id), (b"a", F, blob2.id)),
                TreeChange(CHANGE_MODIFY, (b"b", F, blob2.id), (b"b", F, blob1.id)),
            ],
            self.detect_renames(tree1, tree2),
        )
        self.assertEqual(
            [
                TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob1.id)),
                TreeChange(CHANGE_RENAME, (b"b", F, blob2.id), (b"a", F, blob2.id)),
            ],
            self.detect_renames(tree1, tree2, rewrite_threshold=50),
        )
+
    def test_content_rename_swap(self):
        # Cross renames with near-identical content, found once the
        # rewrite threshold lets the modifies be broken up.
        blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
        blob2 = make_object(Blob, data=b"e\nf\ng\nh\n")
        blob3 = make_object(Blob, data=b"a\nb\nc\ne\n")
        blob4 = make_object(Blob, data=b"e\nf\ng\ni\n")
        tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
        tree2 = self.commit_tree([(b"a", blob4), (b"b", blob3)])
        self.assertEqual(
            [
                TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob3.id)),
                TreeChange(CHANGE_RENAME, (b"b", F, blob2.id), (b"a", F, blob4.id)),
            ],
            self.detect_renames(tree1, tree2, rewrite_threshold=60),
        )
+
+    def test_rewrite_threshold(self):
+        # With no threshold (or one the a->a change passes), a stays a modify
+        # and b is reported as a copy of old a.  With a high threshold the
+        # a->a change counts as a rewrite, so old a is freed up to become the
+        # rename source for b and new a becomes a plain add.
+        blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
+        blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
+        blob3 = make_object(Blob, data=b"a\nb\nf\ng\n")
+
+        tree1 = self.commit_tree([(b"a", blob1)])
+        tree2 = self.commit_tree([(b"a", blob3), (b"b", blob2)])
+
+        no_renames = [
+            TreeChange(CHANGE_MODIFY, (b"a", F, blob1.id), (b"a", F, blob3.id)),
+            TreeChange(CHANGE_COPY, (b"a", F, blob1.id), (b"b", F, blob2.id)),
+        ]
+        self.assertEqual(no_renames, self.detect_renames(tree1, tree2))
+        self.assertEqual(
+            no_renames, self.detect_renames(tree1, tree2, rewrite_threshold=40)
+        )
+        self.assertEqual(
+            [
+                TreeChange.add((b"a", F, blob3.id)),
+                TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob2.id)),
+            ],
+            self.detect_renames(tree1, tree2, rewrite_threshold=80),
+        )
+
+    def test_find_copies_harder_exact(self):
+        # An unmodified file (a) is only considered as a copy source when
+        # find_copies_harder is enabled; otherwise b is a plain add.
+        blob = make_object(Blob, data=b"blob")
+        tree1 = self.commit_tree([(b"a", blob)])
+        tree2 = self.commit_tree([(b"a", blob), (b"b", blob)])
+        self.assertEqual(
+            [TreeChange.add((b"b", F, blob.id))],
+            self.detect_renames(tree1, tree2),
+        )
+        self.assertEqual(
+            [TreeChange(CHANGE_COPY, (b"a", F, blob.id), (b"b", F, blob.id))],
+            self.detect_renames(tree1, tree2, find_copies_harder=True),
+        )
+
+    def test_find_copies_harder_content(self):
+        # Same as test_find_copies_harder_exact, but the copy is detected by
+        # content similarity (blob2 differs from blob1 by one line).
+        blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
+        blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
+        tree1 = self.commit_tree([(b"a", blob1)])
+        tree2 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
+        self.assertEqual(
+            [TreeChange.add((b"b", F, blob2.id))],
+            self.detect_renames(tree1, tree2),
+        )
+        self.assertEqual(
+            [TreeChange(CHANGE_COPY, (b"a", F, blob1.id), (b"b", F, blob2.id))],
+            self.detect_renames(tree1, tree2, find_copies_harder=True),
+        )
+
+    def test_find_copies_harder_with_rewrites(self):
+        # find_copies_harder combined with a rewrite threshold: once the a->a
+        # change is classified as a rewrite, the copy of old a into b is
+        # upgraded to a rename.
+        blob_a1 = make_object(Blob, data=b"a\nb\nc\nd\n")
+        blob_a2 = make_object(Blob, data=b"f\ng\nh\ni\n")
+        blob_b2 = make_object(Blob, data=b"a\nb\nc\ne\n")
+        tree1 = self.commit_tree([(b"a", blob_a1)])
+        tree2 = self.commit_tree([(b"a", blob_a2), (b"b", blob_b2)])
+        self.assertEqual(
+            [
+                TreeChange(CHANGE_MODIFY, (b"a", F, blob_a1.id), (b"a", F, blob_a2.id)),
+                TreeChange(CHANGE_COPY, (b"a", F, blob_a1.id), (b"b", F, blob_b2.id)),
+            ],
+            self.detect_renames(tree1, tree2, find_copies_harder=True),
+        )
+        self.assertEqual(
+            [
+                TreeChange.add((b"a", F, blob_a2.id)),
+                TreeChange(CHANGE_RENAME, (b"a", F, blob_a1.id), (b"b", F, blob_b2.id)),
+            ],
+            self.detect_renames(
+                tree1, tree2, rewrite_threshold=50, find_copies_harder=True
+            ),
+        )
+
+    def test_reuse_detector(self):
+        # A RenameDetector instance is reusable: calling changes_with_renames
+        # twice with the same trees yields the same result both times.
+        blob = make_object(Blob, data=b"blob")
+        tree1 = self.commit_tree([(b"a", blob)])
+        tree2 = self.commit_tree([(b"b", blob)])
+        detector = RenameDetector(self.store)
+        changes = [TreeChange(CHANGE_RENAME, (b"a", F, blob.id), (b"b", F, blob.id))]
+        self.assertEqual(changes, detector.changes_with_renames(tree1.id, tree2.id))
+        self.assertEqual(changes, detector.changes_with_renames(tree1.id, tree2.id))
+
+    def test_want_unchanged(self):
+        # want_unchanged=True additionally reports the untouched b entry as a
+        # CHANGE_UNCHANGED alongside the detected rename.
+        blob_a1 = make_object(Blob, data=b"a\nb\nc\nd\n")
+        blob_b = make_object(Blob, data=b"b")
+        blob_c2 = make_object(Blob, data=b"a\nb\nc\ne\n")
+        tree1 = self.commit_tree([(b"a", blob_a1), (b"b", blob_b)])
+        tree2 = self.commit_tree([(b"c", blob_c2), (b"b", blob_b)])
+        self.assertEqual(
+            [TreeChange(CHANGE_RENAME, (b"a", F, blob_a1.id), (b"c", F, blob_c2.id))],
+            self.detect_renames(tree1, tree2),
+        )
+        self.assertEqual(
+            [
+                TreeChange(CHANGE_RENAME, (b"a", F, blob_a1.id), (b"c", F, blob_c2.id)),
+                TreeChange(
+                    CHANGE_UNCHANGED,
+                    (b"b", F, blob_b.id),
+                    (b"b", F, blob_b.id),
+                ),
+            ],
+            self.detect_renames(tree1, tree2, want_unchanged=True),
+        )
blob - /dev/null
blob + 5076f7767b6d517b08f1dd34c6515586e7763118 (mode 644)
--- /dev/null
+++ tests/test_fastexport.py
+# test_fastexport.py -- Fast export/import functionality
+# Copyright (C) 2010 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+import stat
+from io import BytesIO
+
+from dulwich.object_store import MemoryObjectStore
+from dulwich.objects import ZERO_SHA, Blob, Commit, Tree
+from dulwich.repo import MemoryRepo
+
+from . import SkipTest, TestCase
+from .utils import build_commit_graph
+
+
+class GitFastExporterTests(TestCase):
+    """Tests for GitFastExporter's fast-export stream output."""
+
+    def setUp(self):
+        super().setUp()
+        self.store = MemoryObjectStore()
+        self.stream = BytesIO()
+        # python-fastimport is an optional dependency; skip (rather than
+        # error) when it is not installed.
+        try:
+            from dulwich.fastexport import GitFastExporter
+        except ImportError as exc:
+            raise SkipTest("python-fastimport not available") from exc
+        self.fastexporter = GitFastExporter(self.stream, self.store)
+
+    def test_emit_blob(self):
+        # A single blob is emitted as a fast-import "blob" command with
+        # mark :1 and a data section holding the raw bytes.
+        b = Blob()
+        b.data = b"fooBAR"
+        self.fastexporter.emit_blob(b)
+        self.assertEqual(b"blob\nmark :1\ndata 6\nfooBAR\n", self.stream.getvalue())
+
+    def test_emit_commit(self):
+        # Emitting a commit also emits the blob it references (mark :1)
+        # before the commit command itself (mark :2).
+        b = Blob()
+        b.data = b"FOO"
+        t = Tree()
+        t.add(b"foo", stat.S_IFREG | 0o644, b.id)
+        c = Commit()
+        c.committer = c.author = b"Jelmer <jelmer@host>"
+        c.author_time = c.commit_time = 1271345553
+        c.author_timezone = c.commit_timezone = 0
+        c.message = b"msg"
+        c.tree = t.id
+        self.store.add_objects([(b, None), (t, None), (c, None)])
+        self.fastexporter.emit_commit(c, b"refs/heads/master")
+        self.assertEqual(
+            b"""blob
+mark :1
+data 3
+FOO
+commit refs/heads/master
+mark :2
+author Jelmer <jelmer@host> 1271345553 +0000
+committer Jelmer <jelmer@host> 1271345553 +0000
+data 3
+msg
+M 644 :1 foo
+""",
+            self.stream.getvalue(),
+        )
+
+
+class GitImportProcessorTests(TestCase):
+    """Tests for GitImportProcessor's handling of fast-import commands."""
+
+    def setUp(self):
+        super().setUp()
+        self.repo = MemoryRepo()
+        # python-fastimport is an optional dependency; skip (rather than
+        # error) when it is not installed.
+        try:
+            from dulwich.fastexport import GitImportProcessor
+        except ImportError as exc:
+            raise SkipTest("python-fastimport not available") from exc
+        self.processor = GitImportProcessor(self.repo)
+
+    def test_reset_handler(self):
+        # reset with a literal sha updates the ref and last_commit.
+        from fastimport import commands
+
+        [c1] = build_commit_graph(self.repo.object_store, [[1]])
+        cmd = commands.ResetCommand(b"refs/heads/foo", c1.id)
+        self.processor.reset_handler(cmd)
+        self.assertEqual(c1.id, self.repo.get_refs()[b"refs/heads/foo"])
+        self.assertEqual(c1.id, self.processor.last_commit)
+
+    def test_reset_handler_marker(self):
+        # reset may refer to a previously registered mark (":10").
+        from fastimport import commands
+
+        [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]])
+        self.processor.markers[b"10"] = c1.id
+        cmd = commands.ResetCommand(b"refs/heads/foo", b":10")
+        self.processor.reset_handler(cmd)
+        self.assertEqual(c1.id, self.repo.get_refs()[b"refs/heads/foo"])
+
+    def test_reset_handler_default(self):
+        # reset without a from-value points the ref at ZERO_SHA.
+        from fastimport import commands
+
+        [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]])
+        cmd = commands.ResetCommand(b"refs/heads/foo", None)
+        self.processor.reset_handler(cmd)
+        self.assertEqual(ZERO_SHA, self.repo.get_refs()[b"refs/heads/foo"])
+
+    def test_commit_handler(self):
+        # A parentless commit command creates a commit with the given
+        # author/committer/message and updates the target ref.
+        from fastimport import commands
+
+        cmd = commands.CommitCommand(
+            b"refs/heads/foo",
+            b"mrkr",
+            (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
+            (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
+            b"FOO",
+            None,
+            [],
+            [],
+        )
+        self.processor.commit_handler(cmd)
+        commit = self.repo[self.processor.last_commit]
+        self.assertEqual(b"Jelmer <jelmer@samba.org>", commit.author)
+        self.assertEqual(b"Jelmer <jelmer@samba.org>", commit.committer)
+        self.assertEqual(b"FOO", commit.message)
+        self.assertEqual([], commit.parents)
+        self.assertEqual(432432432.0, commit.commit_time)
+        self.assertEqual(432432432.0, commit.author_time)
+        self.assertEqual(3600, commit.commit_timezone)
+        self.assertEqual(3600, commit.author_timezone)
+        self.assertEqual(commit, self.repo[b"refs/heads/foo"])
+
+    def test_commit_handler_markers(self):
+        # from/merge references given as marks (":10", ":42", ":98") are
+        # resolved through the processor's marker table into parent shas.
+        from fastimport import commands
+
+        [c1, c2, c3] = build_commit_graph(self.repo.object_store, [[1], [2], [3]])
+        self.processor.markers[b"10"] = c1.id
+        self.processor.markers[b"42"] = c2.id
+        self.processor.markers[b"98"] = c3.id
+        cmd = commands.CommitCommand(
+            b"refs/heads/foo",
+            b"mrkr",
+            (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
+            (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
+            b"FOO",
+            b":10",
+            [b":42", b":98"],
+            [],
+        )
+        self.processor.commit_handler(cmd)
+        commit = self.repo[self.processor.last_commit]
+        self.assertEqual(c1.id, commit.parents[0])
+        self.assertEqual(c2.id, commit.parents[1])
+        self.assertEqual(c3.id, commit.parents[2])
+
+    def test_import_stream(self):
+        # End-to-end: parse a raw fast-import stream and check that both
+        # marks were registered and materialized as the right object types.
+        markers = self.processor.import_stream(
+            BytesIO(
+                b"""blob
+mark :1
+data 11
+text for a
+
+commit refs/heads/master
+mark :2
+committer Joe Foo <joe@foo.com> 1288287382 +0000
+data 20
+<The commit message>
+M 100644 :1 a
+
+"""
+            )
+        )
+        self.assertEqual(2, len(markers))
+        self.assertIsInstance(self.repo[markers[b"1"]], Blob)
+        self.assertIsInstance(self.repo[markers[b"2"]], Commit)
+
+    def test_file_add(self):
+        # A filemodify command referencing a blob mark adds that path to the
+        # commit's tree.
+        from fastimport import commands
+
+        cmd = commands.BlobCommand(b"23", b"data")
+        self.processor.blob_handler(cmd)
+        cmd = commands.CommitCommand(
+            b"refs/heads/foo",
+            b"mrkr",
+            (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
+            (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
+            b"FOO",
+            None,
+            [],
+            [commands.FileModifyCommand(b"path", 0o100644, b":23", None)],
+        )
+        self.processor.commit_handler(cmd)
+        commit = self.repo[self.processor.last_commit]
+        self.assertEqual(
+            [(b"path", 0o100644, b"6320cd248dd8aeaab759d5871f8781b5c0505172")],
+            self.repo[commit.tree].items(),
+        )
+
+    def simple_commit(self):
+        # Helper (not a test): create one commit containing a single file
+        # b"path" whose blob holds b"data", and return the commit object.
+        from fastimport import commands
+
+        cmd = commands.BlobCommand(b"23", b"data")
+        self.processor.blob_handler(cmd)
+        cmd = commands.CommitCommand(
+            b"refs/heads/foo",
+            b"mrkr",
+            (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
+            (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
+            b"FOO",
+            None,
+            [],
+            [commands.FileModifyCommand(b"path", 0o100644, b":23", None)],
+        )
+        self.processor.commit_handler(cmd)
+        commit = self.repo[self.processor.last_commit]
+        return commit
+
+    def make_file_commit(self, file_cmds):
+        """Create a trivial commit with the specified file commands.
+
+        Args:
+          file_cmds: File commands to run.
+        Returns: The created commit object
+        """
+        from fastimport import commands
+
+        cmd = commands.CommitCommand(
+            b"refs/heads/foo",
+            b"mrkr",
+            (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
+            (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600),
+            b"FOO",
+            None,
+            [],
+            file_cmds,
+        )
+        self.processor.commit_handler(cmd)
+        return self.repo[self.processor.last_commit]
+
+    def test_file_copy(self):
+        # filecopy keeps the original path and adds a duplicate entry.
+        from fastimport import commands
+
+        self.simple_commit()
+        commit = self.make_file_commit([commands.FileCopyCommand(b"path", b"new_path")])
+        self.assertEqual(
+            [
+                (
+                    b"new_path",
+                    0o100644,
+                    b"6320cd248dd8aeaab759d5871f8781b5c0505172",
+                ),
+                (
+                    b"path",
+                    0o100644,
+                    b"6320cd248dd8aeaab759d5871f8781b5c0505172",
+                ),
+            ],
+            self.repo[commit.tree].items(),
+        )
+
+    def test_file_move(self):
+        # filerename removes the original path and adds the new one.
+        from fastimport import commands
+
+        self.simple_commit()
+        commit = self.make_file_commit(
+            [commands.FileRenameCommand(b"path", b"new_path")]
+        )
+        self.assertEqual(
+            [
+                (
+                    b"new_path",
+                    0o100644,
+                    b"6320cd248dd8aeaab759d5871f8781b5c0505172",
+                ),
+            ],
+            self.repo[commit.tree].items(),
+        )
+
+    def test_file_delete(self):
+        # filedelete removes the single tracked path, leaving an empty tree.
+        from fastimport import commands
+
+        self.simple_commit()
+        commit = self.make_file_commit([commands.FileDeleteCommand(b"path")])
+        self.assertEqual([], self.repo[commit.tree].items())
+
+    def test_file_deleteall(self):
+        # deleteall wipes the whole tree regardless of its contents.
+        from fastimport import commands
+
+        self.simple_commit()
+        commit = self.make_file_commit([commands.FileDeleteAllCommand()])
+        self.assertEqual([], self.repo[commit.tree].items())
blob - /dev/null
blob + 482fb023f6822afe452acb713a10973fef1fb52c (mode 644)
--- /dev/null
+++ tests/test_file.py
+# test_file.py -- Test for git files
+# Copyright (C) 2010 Google, Inc.
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+import io
+import os
+import shutil
+import sys
+import tempfile
+
+from dulwich.file import FileLocked, GitFile, _fancy_rename
+
+from . import SkipTest, TestCase
+
+
+class FancyRenameTests(TestCase):
+    """Tests for the _fancy_rename helper from dulwich.file."""
+
+    def setUp(self):
+        super().setUp()
+        self._tempdir = tempfile.mkdtemp()
+        self.foo = self.path("foo")
+        self.bar = self.path("bar")
+        self.create(self.foo, b"foo contents")
+
+    def tearDown(self):
+        shutil.rmtree(self._tempdir)
+        super().tearDown()
+
+    def path(self, filename):
+        # Resolve a filename inside the per-test temporary directory.
+        return os.path.join(self._tempdir, filename)
+
+    def create(self, path, contents):
+        # Write a small fixture file with the given byte contents.
+        f = open(path, "wb")
+        f.write(contents)
+        f.close()
+
+    def test_no_dest_exists(self):
+        # Renaming onto a non-existent destination moves the file.
+        self.assertFalse(os.path.exists(self.bar))
+        _fancy_rename(self.foo, self.bar)
+        self.assertFalse(os.path.exists(self.foo))
+
+        new_f = open(self.bar, "rb")
+        self.assertEqual(b"foo contents", new_f.read())
+        new_f.close()
+
+    def test_dest_exists(self):
+        # Renaming onto an existing destination replaces its contents.
+        self.create(self.bar, b"bar contents")
+        _fancy_rename(self.foo, self.bar)
+        self.assertFalse(os.path.exists(self.foo))
+
+        new_f = open(self.bar, "rb")
+        self.assertEqual(b"foo contents", new_f.read())
+        new_f.close()
+
+    def test_dest_opened(self):
+        # Windows-only: renaming over a file that is held open must fail and
+        # leave both source and destination untouched.  Other platforms allow
+        # overwriting open files, so the scenario cannot be reproduced there.
+        if sys.platform != "win32":
+            raise SkipTest("platform allows overwriting open files")
+        self.create(self.bar, b"bar contents")
+        dest_f = open(self.bar, "rb")
+        self.assertRaises(OSError, _fancy_rename, self.foo, self.bar)
+        dest_f.close()
+        self.assertTrue(os.path.exists(self.path("foo")))
+
+        new_f = open(self.foo, "rb")
+        self.assertEqual(b"foo contents", new_f.read())
+        new_f.close()
+
+        new_f = open(self.bar, "rb")
+        self.assertEqual(b"bar contents", new_f.read())
+        new_f.close()
+
+
+class GitFileTests(TestCase):
+    """Tests for GitFile's read mode and lock-file based write mode."""
+
+    def setUp(self):
+        super().setUp()
+        self._tempdir = tempfile.mkdtemp()
+        f = open(self.path("foo"), "wb")
+        f.write(b"foo contents")
+        f.close()
+
+    def tearDown(self):
+        shutil.rmtree(self._tempdir)
+        super().tearDown()
+
+    def path(self, filename):
+        # Resolve a filename inside the per-test temporary directory.
+        return os.path.join(self._tempdir, filename)
+
+    def test_invalid(self):
+        # Only binary read and plain write modes are supported; text,
+        # append and read/write modes are rejected with IOError.
+        foo = self.path("foo")
+        self.assertRaises(IOError, GitFile, foo, mode="r")
+        self.assertRaises(IOError, GitFile, foo, mode="ab")
+        self.assertRaises(IOError, GitFile, foo, mode="r+b")
+        self.assertRaises(IOError, GitFile, foo, mode="w+b")
+        self.assertRaises(IOError, GitFile, foo, mode="a+bU")
+
+    def test_readonly(self):
+        # Read mode behaves like an ordinary binary file object.
+        f = GitFile(self.path("foo"), "rb")
+        self.assertIsInstance(f, io.IOBase)
+        self.assertEqual(b"foo contents", f.read())
+        self.assertEqual(b"", f.read())
+        f.seek(4)
+        self.assertEqual(b"contents", f.read())
+        f.close()
+
+    def test_default_mode(self):
+        # With no mode argument the file opens readable.
+        f = GitFile(self.path("foo"))
+        self.assertEqual(b"foo contents", f.read())
+        f.close()
+
+    def test_write(self):
+        # Write mode goes through a "<name>.lock" file that exists while the
+        # GitFile is open and is gone (renamed into place) after close().
+        foo = self.path("foo")
+        foo_lock = "%s.lock" % foo
+
+        orig_f = open(foo, "rb")
+        self.assertEqual(orig_f.read(), b"foo contents")
+        orig_f.close()
+
+        self.assertFalse(os.path.exists(foo_lock))
+        f = GitFile(foo, "wb")
+        self.assertFalse(f.closed)
+        # Unknown attributes are not silently proxied.
+        self.assertRaises(AttributeError, getattr, f, "not_a_file_property")
+
+        self.assertTrue(os.path.exists(foo_lock))
+        f.write(b"new stuff")
+        f.seek(4)
+        f.write(b"contents")
+        f.close()
+        self.assertFalse(os.path.exists(foo_lock))
+
+        new_f = open(foo, "rb")
+        self.assertEqual(b"new contents", new_f.read())
+        new_f.close()
+
+    def test_open_twice(self):
+        # A second writer on the same path must raise FileLocked and must not
+        # disturb the first writer's data.
+        foo = self.path("foo")
+        f1 = GitFile(foo, "wb")
+        f1.write(b"new")
+        try:
+            f2 = GitFile(foo, "wb")
+            self.fail()
+        except FileLocked:
+            pass
+        else:
+            # NOTE(review): this branch is unreachable -- self.fail() always
+            # raises, so a successful second open surfaces as AssertionError
+            # and f2.close() never runs.
+            f2.close()
+        f1.write(b" contents")
+        f1.close()
+
+        # Ensure trying to open twice doesn't affect original.
+        f = open(foo, "rb")
+        self.assertEqual(b"new contents", f.read())
+        f.close()
+
+    def test_abort(self):
+        # abort() discards the pending write, removes the lock file and
+        # leaves the original contents intact.
+        foo = self.path("foo")
+        foo_lock = "%s.lock" % foo
+
+        orig_f = open(foo, "rb")
+        self.assertEqual(orig_f.read(), b"foo contents")
+        orig_f.close()
+
+        f = GitFile(foo, "wb")
+        f.write(b"new contents")
+        f.abort()
+        self.assertTrue(f.closed)
+        self.assertFalse(os.path.exists(foo_lock))
+
+        new_orig_f = open(foo, "rb")
+        self.assertEqual(new_orig_f.read(), b"foo contents")
+        new_orig_f.close()
+
+    def test_abort_close(self):
+        # close() after abort() and abort() after close() are both no-ops
+        # rather than errors.
+        foo = self.path("foo")
+        f = GitFile(foo, "wb")
+        f.abort()
+        try:
+            f.close()
+        except OSError:
+            self.fail()
+
+        f = GitFile(foo, "wb")
+        f.close()
+        try:
+            f.abort()
+        except OSError:
+            self.fail()
+
+    def test_abort_close_removed(self):
+        # abort() must tolerate the lock file having already disappeared
+        # (e.g. removed externally) and still mark the GitFile closed.
+        foo = self.path("foo")
+        f = GitFile(foo, "wb")
+
+        f._file.close()
+        os.remove(foo + ".lock")
+
+        f.abort()
+        self.assertTrue(f._closed)
blob - /dev/null
blob + f345391b0295bc75584194f31fcc10eb155e164e (mode 644)
--- /dev/null
+++ tests/test_grafts.py
+# test_grafts.py -- Tests for graftpoints
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for graftpoints."""
+
+import os
+import shutil
+import tempfile
+
+from dulwich.errors import ObjectFormatException
+from dulwich.objects import Tree
+from dulwich.repo import MemoryRepo, Repo, parse_graftpoints, serialize_graftpoints
+
+from . import TestCase
+
+
+def makesha(digit):
+    # Build a fake 40-byte sha by repeating the digit's ASCII representation
+    # and truncating to 40 characters (handles multi-digit inputs too).
+    return (str(digit).encode("ascii") * 40)[:40]
+
+
+class GraftParserTests(TestCase):
+    """Tests for parse_graftpoints: one graft line per entry, commit sha
+    followed by optional space-separated parent shas."""
+
+    def assertParse(self, expected, graftpoints):
+        self.assertEqual(expected, parse_graftpoints(iter(graftpoints)))
+
+    def test_no_grafts(self):
+        self.assertParse({}, [])
+
+    def test_no_parents(self):
+        # A bare sha grafts the commit to have no parents.
+        self.assertParse({makesha(0): []}, [makesha(0)])
+
+    def test_parents(self):
+        # Shas after the first on a line become the grafted parents.
+        self.assertParse(
+            {makesha(0): [makesha(1), makesha(2)]},
+            [b" ".join([makesha(0), makesha(1), makesha(2)])],
+        )
+
+    def test_multiple_hybrid(self):
+        # Mixed input: parentless, single-parent and multi-parent lines.
+        self.assertParse(
+            {
+                makesha(0): [],
+                makesha(1): [makesha(2)],
+                makesha(3): [makesha(4), makesha(5)],
+            },
+            [
+                makesha(0),
+                b" ".join([makesha(1), makesha(2)]),
+                b" ".join([makesha(3), makesha(4), makesha(5)]),
+            ],
+        )
+
+
+class GraftSerializerTests(TestCase):
+    """Tests for serialize_graftpoints, the inverse of parse_graftpoints."""
+
+    def assertSerialize(self, expected, graftpoints):
+        # Compare sorted since dict ordering is not part of the contract.
+        self.assertEqual(sorted(expected), sorted(serialize_graftpoints(graftpoints)))
+
+    def test_no_grafts(self):
+        self.assertSerialize(b"", {})
+
+    def test_no_parents(self):
+        self.assertSerialize(makesha(0), {makesha(0): []})
+
+    def test_parents(self):
+        self.assertSerialize(
+            b" ".join([makesha(0), makesha(1), makesha(2)]),
+            {makesha(0): [makesha(1), makesha(2)]},
+        )
+
+    def test_multiple_hybrid(self):
+        # One line per graftpoint, newline-separated.
+        self.assertSerialize(
+            b"\n".join(
+                [
+                    makesha(0),
+                    b" ".join([makesha(1), makesha(2)]),
+                    b" ".join([makesha(3), makesha(4), makesha(5)]),
+                ]
+            ),
+            {
+                makesha(0): [],
+                makesha(1): [makesha(2)],
+                makesha(3): [makesha(4), makesha(5)],
+            },
+        )
+
+
+class GraftsInRepositoryBase:
+    """Shared graftpoint tests; mixed into TestCase subclasses that set up
+    self._repo (the repository) and self._shas (three commit shas, oldest
+    first) in their setUp."""
+
+    def tearDown(self):
+        super().tearDown()
+
+    def get_repo_with_grafts(self, grafts):
+        r = self._repo
+        r._add_graftpoints(grafts)
+        return r
+
+    def test_no_grafts(self):
+        # Without grafts the walker sees the full history, newest first.
+        r = self.get_repo_with_grafts({})
+
+        shas = [e.commit.id for e in r.get_walker()]
+        self.assertEqual(shas, self._shas[::-1])
+
+    def test_no_parents_graft(self):
+        # Grafting HEAD to have no parents truncates history to one commit.
+        r = self.get_repo_with_grafts({self._repo.head(): []})
+
+        self.assertEqual([e.commit.id for e in r.get_walker()], [r.head()])
+
+    def test_existing_parent_graft(self):
+        # Grafting the newest commit directly onto the oldest skips the
+        # middle commit in the walk.
+        r = self.get_repo_with_grafts({self._shas[-1]: [self._shas[0]]})
+
+        self.assertEqual(
+            [e.commit.id for e in r.get_walker()],
+            [self._shas[-1], self._shas[0]],
+        )
+
+    def test_remove_graft(self):
+        # Removing a graft restores the original history.
+        r = self.get_repo_with_grafts({self._repo.head(): []})
+        r._remove_graftpoints([self._repo.head()])
+
+        self.assertEqual([e.commit.id for e in r.get_walker()], self._shas[::-1])
+
+    def test_object_store_fail_invalid_parents(self):
+        # Malformed parent shas are rejected with ObjectFormatException.
+        r = self._repo
+
+        self.assertRaises(
+            ObjectFormatException, r._add_graftpoints, {self._shas[-1]: ["1"]}
+        )
+
+
+class GraftsInRepoTests(GraftsInRepositoryBase, TestCase):
+    """Graftpoint tests against an on-disk Repo, plus loading grafts from
+    the info/grafts file."""
+
+    def setUp(self):
+        super().setUp()
+        self._repo_dir = os.path.join(tempfile.mkdtemp())
+        r = self._repo = Repo.init(self._repo_dir)
+        self.addCleanup(shutil.rmtree, self._repo_dir)
+
+        self._shas = []
+
+        commit_kwargs = {
+            "committer": b"Test Committer <test@nodomain.com>",
+            "author": b"Test Author <test@nodomain.com>",
+            "commit_timestamp": 12395,
+            "commit_timezone": 0,
+            "author_timestamp": 12395,
+            "author_timezone": 0,
+        }
+
+        # Three empty commits form the linear history the base tests expect.
+        self._shas.append(r.do_commit(b"empty commit", **commit_kwargs))
+        self._shas.append(r.do_commit(b"empty commit", **commit_kwargs))
+        self._shas.append(r.do_commit(b"empty commit", **commit_kwargs))
+
+    def test_init_with_empty_info_grafts(self):
+        # An empty info/grafts file yields no graftpoints on reload.
+        r = self._repo
+        r._put_named_file(os.path.join("info", "grafts"), b"")
+
+        r = Repo(self._repo_dir)
+        self.assertEqual({}, r._graftpoints)
+
+    def test_init_with_info_grafts(self):
+        # A populated info/grafts file is parsed when the repo is reopened.
+        r = self._repo
+        r._put_named_file(
+            os.path.join("info", "grafts"),
+            self._shas[-1] + b" " + self._shas[0],
+        )
+
+        r = Repo(self._repo_dir)
+        self.assertEqual({self._shas[-1]: [self._shas[0]]}, r._graftpoints)
+
+
+class GraftsInMemoryRepoTests(GraftsInRepositoryBase, TestCase):
+    """Graftpoint tests against a MemoryRepo."""
+
+    def setUp(self):
+        super().setUp()
+        r = self._repo = MemoryRepo()
+
+        self._shas = []
+
+        # MemoryRepo.do_commit needs an explicit tree; use an empty one.
+        tree = Tree()
+
+        commit_kwargs = {
+            "committer": b"Test Committer <test@nodomain.com>",
+            "author": b"Test Author <test@nodomain.com>",
+            "commit_timestamp": 12395,
+            "commit_timezone": 0,
+            "author_timestamp": 12395,
+            "author_timezone": 0,
+            "tree": tree.id,
+        }
+
+        # Three empty commits form the linear history the base tests expect.
+        self._shas.append(r.do_commit(b"empty commit", **commit_kwargs))
+        self._shas.append(r.do_commit(b"empty commit", **commit_kwargs))
+        self._shas.append(r.do_commit(b"empty commit", **commit_kwargs))
blob - /dev/null
blob + 0d14b5e356fca3b51749d7c540ef9d6a0b00d93f (mode 644)
--- /dev/null
+++ tests/test_graph.py
+# test_graph.py -- Tests for merge base
+# Copyright (c) 2020 Kevin B. Hendricks, Stratford Ontario Canada
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+
+"""Tests for dulwich.graph."""
+
+from dulwich.graph import WorkList, _find_lcas, can_fast_forward
+from dulwich.repo import MemoryRepo
+
+from . import TestCase
+from .utils import make_commit
+
+
+class FindMergeBaseTests(TestCase):
+    """Tests for _find_lcas (lowest common ancestor / merge base search)
+    over synthetic commit DAGs given as {commit: [parents]} dicts."""
+
+    @staticmethod
+    def run_test(dag, inputs):
+        def lookup_parents(commit_id):
+            return dag[commit_id]
+
+        def lookup_stamp(commit_id):
+            # any constant timestamp value here will work to force
+            # this test to test the same behaviour as done previously
+            return 100
+
+        c1 = inputs[0]
+        c2s = inputs[1:]
+        return set(_find_lcas(lookup_parents, c1, c2s, lookup_stamp))
+
+    def test_multiple_lca(self):
+        # two lowest common ancestors
+        graph = {
+            "5": ["1", "2"],
+            "4": ["3", "1"],
+            "3": ["2"],
+            "2": ["0"],
+            "1": [],
+            "0": [],
+        }
+        self.assertEqual(self.run_test(graph, ["4", "5"]), {"1", "2"})
+
+    def test_no_common_ancestor(self):
+        # no common ancestor
+        graph = {
+            "4": ["2"],
+            "3": ["1"],
+            "2": [],
+            "1": ["0"],
+            "0": [],
+        }
+        self.assertEqual(self.run_test(graph, ["4", "3"]), set())
+
+    def test_ancestor(self):
+        # ancestor: one input is an ancestor of the other
+        graph = {
+            "G": ["D", "F"],
+            "F": ["E"],
+            "D": ["C"],
+            "C": ["B"],
+            "E": ["B"],
+            "B": ["A"],
+            "A": [],
+        }
+        self.assertEqual(self.run_test(graph, ["D", "C"]), {"C"})
+
+    def test_direct_parent(self):
+        # parent: one input is the direct parent of the other
+        graph = {
+            "G": ["D", "F"],
+            "F": ["E"],
+            "D": ["C"],
+            "C": ["B"],
+            "E": ["B"],
+            "B": ["A"],
+            "A": [],
+        }
+        self.assertEqual(self.run_test(graph, ["G", "D"]), {"D"})
+
+    def test_another_crossover(self):
+        # Another cross over
+        graph = {
+            "G": ["D", "F"],
+            "F": ["E", "C"],
+            "D": ["C", "E"],
+            "C": ["B"],
+            "E": ["B"],
+            "B": ["A"],
+            "A": [],
+        }
+        self.assertEqual(self.run_test(graph, ["D", "F"]), {"E", "C"})
+
+    def test_three_way_merge_lca(self):
+        # three way merge commit straight from git docs
+        graph = {
+            "C": ["C1"],
+            "C1": ["C2"],
+            "C2": ["C3"],
+            "C3": ["C4"],
+            "C4": ["2"],
+            "B": ["B1"],
+            "B1": ["B2"],
+            "B2": ["B3"],
+            "B3": ["1"],
+            "A": ["A1"],
+            "A1": ["A2"],
+            "A2": ["A3"],
+            "A3": ["1"],
+            "1": ["2"],
+            "2": [],
+        }
+        # assumes a theoretical merge M exists that merges B and C first
+        # which actually means find the first LCA from either of B OR C with A
+        self.assertEqual(self.run_test(graph, ["A", "B", "C"]), {"1"})
+
+    def test_octopus(self):
+        # octopus algorithm test
+        # test straight from git docs of A, B, and C
+        # but this time use octopus to find lcas of A, B, and C simultaneously
+        graph = {
+            "C": ["C1"],
+            "C1": ["C2"],
+            "C2": ["C3"],
+            "C3": ["C4"],
+            "C4": ["2"],
+            "B": ["B1"],
+            "B1": ["B2"],
+            "B2": ["B3"],
+            "B3": ["1"],
+            "A": ["A1"],
+            "A1": ["A2"],
+            "A2": ["A3"],
+            "A3": ["1"],
+            "1": ["2"],
+            "2": [],
+        }
+
+        def lookup_parents(cid):
+            return graph[cid]
+
+        def lookup_stamp(commit_id):
+            # any constant timestamp value here will work to force
+            # this test to test the same behaviour as done previously
+            return 100
+
+        # Fold each additional head into the running LCA set, pairwise.
+        lcas = ["A"]
+        others = ["B", "C"]
+        for cmt in others:
+            next_lcas = []
+            for ca in lcas:
+                res = _find_lcas(lookup_parents, cmt, [ca], lookup_stamp)
+                next_lcas.extend(res)
+            lcas = next_lcas[:]
+        self.assertEqual(set(lcas), {"2"})
+
+
+class CanFastForwardTests(TestCase):
+    """Tests for can_fast_forward on linear and diverged histories."""
+
+    def test_ff(self):
+        # Linear chain base -> c1 -> c2: fast-forward only goes forward
+        # (or to the same commit), never backward.
+        r = MemoryRepo()
+        base = make_commit()
+        c1 = make_commit(parents=[base.id])
+        c2 = make_commit(parents=[c1.id])
+        r.object_store.add_objects([(base, None), (c1, None), (c2, None)])
+        self.assertTrue(can_fast_forward(r, c1.id, c1.id))
+        self.assertTrue(can_fast_forward(r, base.id, c1.id))
+        self.assertTrue(can_fast_forward(r, c1.id, c2.id))
+        self.assertFalse(can_fast_forward(r, c2.id, c1.id))
+
+    def test_diverged(self):
+        # c2a and c2b share parent c1 but diverge; neither can fast-forward
+        # to the other, though c1 can reach both.
+        r = MemoryRepo()
+        base = make_commit()
+        c1 = make_commit(parents=[base.id])
+        c2a = make_commit(parents=[c1.id], message=b"2a")
+        c2b = make_commit(parents=[c1.id], message=b"2b")
+        r.object_store.add_objects([(base, None), (c1, None), (c2a, None), (c2b, None)])
+        self.assertTrue(can_fast_forward(r, c1.id, c2a.id))
+        self.assertTrue(can_fast_forward(r, c1.id, c2b.id))
+        self.assertFalse(can_fast_forward(r, c2a.id, c2b.id))
+        self.assertFalse(can_fast_forward(r, c2b.id, c2a.id))
+
+
+class WorkListTest(TestCase):
+    """Tests for the WorkList max-priority queue used by the LCA search."""
+
+    def test_WorkList(self):
+        # tuples of (timestamp, value) are stored in a Priority MaxQueue
+        # repeated use of get should return them in maxheap timestamp
+        # order: largest time value (most recent in time) first then earlier/older
+        wlst = WorkList()
+        wlst.add((100, "Test Value 1"))
+        wlst.add((50, "Test Value 2"))
+        wlst.add((200, "Test Value 3"))
+        self.assertTrue(wlst.get() == (200, "Test Value 3"))
+        self.assertTrue(wlst.get() == (100, "Test Value 1"))
+        # Adding after draining part of the queue re-heapifies correctly.
+        wlst.add((150, "Test Value 4"))
+        self.assertTrue(wlst.get() == (150, "Test Value 4"))
+        self.assertTrue(wlst.get() == (50, "Test Value 2"))
blob - /dev/null
blob + 40e33c9d91dc7dd9fa130370f85b8cb3208e080e (mode 644)
--- /dev/null
+++ tests/test_greenthreads.py
+# test_greenthreads.py -- Unittests for eventlet.
+# Copyright (C) 2013 eNovance SAS <licensing@enovance.com>
+#
+# Author: Fabien Boucher <fabien.boucher@enovance.com>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+import time
+
+from dulwich.object_store import MemoryObjectStore
+from dulwich.objects import Blob, Commit, Tree, parse_timezone
+
+from . import TestCase, skipIf
+
+try:
+ import gevent # noqa: F401
+
+ gevent_support = True
+except ImportError:
+ gevent_support = False
+
+if gevent_support:
+ from ..greenthreads import GreenThreadsMissingObjectFinder
+
+skipmsg = "Gevent library is not installed"
+
+
+def create_commit(marker=None):
+    # Build a (commit, tree, blob) triple whose contents are made unique by
+    # the given byte-string marker; the objects are not added to any store.
+    blob = Blob.from_string(b"The blob content " + marker)
+    tree = Tree()
+    tree.add(b"thefile " + marker, 0o100644, blob.id)
+    cmt = Commit()
+    cmt.tree = tree.id
+    cmt.author = cmt.committer = b"John Doe <john@doe.net>"
+    cmt.message = marker
+    tz = parse_timezone(b"-0200")[0]
+    cmt.commit_time = cmt.author_time = int(time.time())
+    cmt.commit_timezone = cmt.author_timezone = tz
+    return cmt, tree, blob
+
+
+def init_store(store, count=1):
+ ret = []
+ for i in range(count):
+ objs = create_commit(marker=("%d" % i).encode("ascii"))
+ for obj in objs:
+ ret.append(obj)
+ store.add_object(obj)
+ return ret
+
+
+@skipIf(not gevent_support, skipmsg)
+class TestGreenThreadsMissingObjectFinder(TestCase):
+ def setUp(self):
+ super().setUp()
+ self.store = MemoryObjectStore()
+ self.cmt_amount = 10
+ self.objs = init_store(self.store, self.cmt_amount)
+
+ def test_finder(self):
+ wants = [sha.id for sha in self.objs if isinstance(sha, Commit)]
+ finder = GreenThreadsMissingObjectFinder(self.store, (), wants)
+ self.assertEqual(len(finder.sha_done), 0)
+ self.assertEqual(len(finder.objects_to_send), self.cmt_amount)
+
+ finder = GreenThreadsMissingObjectFinder(
+ self.store, wants[0 : int(self.cmt_amount / 2)], wants
+ )
+ # sha_done will contain the commit ids and the shas of the blobs referred to in the trees
+ self.assertEqual(len(finder.sha_done), (self.cmt_amount / 2) * 2)
+ self.assertEqual(len(finder.objects_to_send), self.cmt_amount / 2)
blob - /dev/null
blob + 965c98925e7785b5c9c665ce3007f802d65a7e32 (mode 644)
--- /dev/null
+++ tests/test_hooks.py
+# test_hooks.py -- Tests for executing hooks
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for executing hooks."""
+
+import os
+import shutil
+import stat
+import sys
+import tempfile
+
+from dulwich import errors
+from dulwich.hooks import CommitMsgShellHook, PostCommitShellHook, PreCommitShellHook
+
+from . import TestCase
+
+
+class ShellHookTests(TestCase):
+ def setUp(self):
+ super().setUp()
+ if os.name != "posix":
+ self.skipTest("shell hook tests requires POSIX shell")
+ self.assertTrue(os.path.exists("/bin/sh"))
+
+ def test_hook_pre_commit(self):
+ repo_dir = os.path.join(tempfile.mkdtemp())
+ os.mkdir(os.path.join(repo_dir, "hooks"))
+ self.addCleanup(shutil.rmtree, repo_dir)
+
+ pre_commit_fail = """#!/bin/sh
+exit 1
+"""
+
+ pre_commit_success = """#!/bin/sh
+exit 0
+"""
+ pre_commit_cwd = (
+ """#!/bin/sh
+if [ "$(pwd)" != '"""
+ + repo_dir
+ + """' ]; then
+ echo "Expected path '"""
+ + repo_dir
+ + """', got '$(pwd)'"
+ exit 1
+fi
+
+exit 0
+"""
+ )
+
+ pre_commit = os.path.join(repo_dir, "hooks", "pre-commit")
+ hook = PreCommitShellHook(repo_dir, repo_dir)
+
+ with open(pre_commit, "w") as f:
+ f.write(pre_commit_fail)
+ os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+
+ self.assertRaises(errors.HookError, hook.execute)
+
+ if sys.platform != "darwin":
+ # Don't bother running this test on darwin since path
+ # canonicalization messes with our simple string comparison.
+ with open(pre_commit, "w") as f:
+ f.write(pre_commit_cwd)
+ os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+
+ hook.execute()
+
+ with open(pre_commit, "w") as f:
+ f.write(pre_commit_success)
+ os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+
+ hook.execute()
+
+ def test_hook_commit_msg(self):
+ repo_dir = os.path.join(tempfile.mkdtemp())
+ os.mkdir(os.path.join(repo_dir, "hooks"))
+ self.addCleanup(shutil.rmtree, repo_dir)
+
+ commit_msg_fail = """#!/bin/sh
+exit 1
+"""
+
+ commit_msg_success = """#!/bin/sh
+exit 0
+"""
+
+ commit_msg_cwd = (
+ """#!/bin/sh
+if [ "$(pwd)" = '"""
+ + repo_dir
+ + "' ]; then exit 0; else exit 1; fi\n"
+ )
+
+ commit_msg = os.path.join(repo_dir, "hooks", "commit-msg")
+ hook = CommitMsgShellHook(repo_dir)
+
+ with open(commit_msg, "w") as f:
+ f.write(commit_msg_fail)
+ os.chmod(commit_msg, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+
+ self.assertRaises(errors.HookError, hook.execute, b"failed commit")
+
+ if sys.platform != "darwin":
+ # Don't bother running this test on darwin since path
+ # canonicalization messes with our simple string comparison.
+ with open(commit_msg, "w") as f:
+ f.write(commit_msg_cwd)
+ os.chmod(commit_msg, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+
+ hook.execute(b"cwd test commit")
+
+ with open(commit_msg, "w") as f:
+ f.write(commit_msg_success)
+ os.chmod(commit_msg, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+
+ hook.execute(b"empty commit")
+
+ def test_hook_post_commit(self):
+ (fd, path) = tempfile.mkstemp()
+ os.close(fd)
+
+ repo_dir = os.path.join(tempfile.mkdtemp())
+ os.mkdir(os.path.join(repo_dir, "hooks"))
+ self.addCleanup(shutil.rmtree, repo_dir)
+
+ post_commit_success = (
+ """#!/bin/sh
+rm """
+ + path
+ + "\n"
+ )
+
+ post_commit_fail = """#!/bin/sh
+exit 1
+"""
+
+ post_commit_cwd = (
+ """#!/bin/sh
+if [ "$(pwd)" = '"""
+ + repo_dir
+ + "' ]; then exit 0; else exit 1; fi\n"
+ )
+
+ post_commit = os.path.join(repo_dir, "hooks", "post-commit")
+ hook = PostCommitShellHook(repo_dir)
+
+ with open(post_commit, "w") as f:
+ f.write(post_commit_fail)
+ os.chmod(post_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+
+ self.assertRaises(errors.HookError, hook.execute)
+
+ if sys.platform != "darwin":
+ # Don't bother running this test on darwin since path
+ # canonicalization messes with our simple string comparison.
+ with open(post_commit, "w") as f:
+ f.write(post_commit_cwd)
+ os.chmod(post_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+
+ hook.execute()
+
+ with open(post_commit, "w") as f:
+ f.write(post_commit_success)
+ os.chmod(post_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+
+ hook.execute()
+ self.assertFalse(os.path.exists(path))
blob - /dev/null
blob + d051d1d46cbf2f184a56253d05a6fb912de1c2d0 (mode 644)
--- /dev/null
+++ tests/test_ignore.py
+# test_ignore.py -- Tests for ignore files.
+# Copyright (C) 2017 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for ignore files."""
+
+import os
+import re
+import shutil
+import tempfile
+from io import BytesIO
+
+from dulwich.ignore import (
+ IgnoreFilter,
+ IgnoreFilterManager,
+ IgnoreFilterStack,
+ Pattern,
+ match_pattern,
+ read_ignore_patterns,
+ translate,
+)
+from dulwich.repo import Repo
+
+from . import TestCase
+
+POSITIVE_MATCH_TESTS = [
+ (b"foo.c", b"*.c"),
+ (b".c", b"*.c"),
+ (b"foo/foo.c", b"*.c"),
+ (b"foo/foo.c", b"foo.c"),
+ (b"foo.c", b"/*.c"),
+ (b"foo.c", b"/foo.c"),
+ (b"foo.c", b"foo.c"),
+ (b"foo.c", b"foo.[ch]"),
+ (b"foo/bar/bla.c", b"foo/**"),
+ (b"foo/bar/bla/blie.c", b"foo/**/blie.c"),
+ (b"foo/bar/bla.c", b"**/bla.c"),
+ (b"bla.c", b"**/bla.c"),
+ (b"foo/bar", b"foo/**/bar"),
+ (b"foo/bla/bar", b"foo/**/bar"),
+ (b"foo/bar/", b"bar/"),
+ (b"foo/bar/", b"bar"),
+ (b"foo/bar/something", b"foo/bar/*"),
+]
+
+NEGATIVE_MATCH_TESTS = [
+ (b"foo.c", b"foo.[dh]"),
+ (b"foo/foo.c", b"/foo.c"),
+ (b"foo/foo.c", b"/*.c"),
+ (b"foo/bar/", b"/bar/"),
+ (b"foo/bar/", b"foo/bar/*"),
+ (b"foo/bar", b"foo?bar"),
+]
+
+
+TRANSLATE_TESTS = [
+ (b"*.c", b"(?ms)(.*/)?[^/]*\\.c/?\\Z"),
+ (b"foo.c", b"(?ms)(.*/)?foo\\.c/?\\Z"),
+ (b"/*.c", b"(?ms)[^/]*\\.c/?\\Z"),
+ (b"/foo.c", b"(?ms)foo\\.c/?\\Z"),
+ (b"foo.c", b"(?ms)(.*/)?foo\\.c/?\\Z"),
+ (b"foo.[ch]", b"(?ms)(.*/)?foo\\.[ch]/?\\Z"),
+ (b"bar/", b"(?ms)(.*/)?bar\\/\\Z"),
+ (b"foo/**", b"(?ms)foo(/.*)?/?\\Z"),
+ (b"foo/**/blie.c", b"(?ms)foo(/.*)?\\/blie\\.c/?\\Z"),
+ (b"**/bla.c", b"(?ms)(.*/)?bla\\.c/?\\Z"),
+ (b"foo/**/bar", b"(?ms)foo(/.*)?\\/bar/?\\Z"),
+ (b"foo/bar/*", b"(?ms)foo\\/bar\\/[^/]+/?\\Z"),
+ (b"/foo\\[bar\\]", b"(?ms)foo\\[bar\\]/?\\Z"),
+ (b"/foo[bar]", b"(?ms)foo[bar]/?\\Z"),
+ (b"/foo[0-9]", b"(?ms)foo[0-9]/?\\Z"),
+]
+
+
+class TranslateTests(TestCase):
+ def test_translate(self):
+ for pattern, regex in TRANSLATE_TESTS:
+ if re.escape(b"/") == b"/":
+ # Slash is no longer escaped in Python 3.7, so undo the escaping
+ # in the expected return value.
+ regex = regex.replace(b"\\/", b"/")
+ self.assertEqual(
+ regex,
+ translate(pattern),
+ f"orig pattern: {pattern!r}, regex: {translate(pattern)!r}, expected: {regex!r}",
+ )
+
+
+class ReadIgnorePatterns(TestCase):
+ def test_read_file(self):
+ f = BytesIO(
+ b"""
+# a comment
+\x20\x20
+# and an empty line:
+
+\\#not a comment
+!negative
+with trailing whitespace
+with escaped trailing whitespace\\
+"""
+ )
+ self.assertEqual(
+ list(read_ignore_patterns(f)),
+ [
+ b"\\#not a comment",
+ b"!negative",
+ b"with trailing whitespace",
+ b"with escaped trailing whitespace ",
+ ],
+ )
+
+
+class MatchPatternTests(TestCase):
+ def test_matches(self):
+ for path, pattern in POSITIVE_MATCH_TESTS:
+ self.assertTrue(
+ match_pattern(path, pattern),
+ f"path: {path!r}, pattern: {pattern!r}",
+ )
+
+ def test_no_matches(self):
+ for path, pattern in NEGATIVE_MATCH_TESTS:
+ self.assertFalse(
+ match_pattern(path, pattern),
+ f"path: {path!r}, pattern: {pattern!r}",
+ )
+
+
+class IgnoreFilterTests(TestCase):
+ def test_included(self):
+ filter = IgnoreFilter([b"a.c", b"b.c"])
+ self.assertTrue(filter.is_ignored(b"a.c"))
+ self.assertIs(None, filter.is_ignored(b"c.c"))
+ self.assertEqual([Pattern(b"a.c")], list(filter.find_matching(b"a.c")))
+ self.assertEqual([], list(filter.find_matching(b"c.c")))
+
+ def test_included_ignorecase(self):
+ filter = IgnoreFilter([b"a.c", b"b.c"], ignorecase=False)
+ self.assertTrue(filter.is_ignored(b"a.c"))
+ self.assertFalse(filter.is_ignored(b"A.c"))
+ filter = IgnoreFilter([b"a.c", b"b.c"], ignorecase=True)
+ self.assertTrue(filter.is_ignored(b"a.c"))
+ self.assertTrue(filter.is_ignored(b"A.c"))
+ self.assertTrue(filter.is_ignored(b"A.C"))
+
+ def test_excluded(self):
+ filter = IgnoreFilter([b"a.c", b"b.c", b"!c.c"])
+ self.assertFalse(filter.is_ignored(b"c.c"))
+ self.assertIs(None, filter.is_ignored(b"d.c"))
+ self.assertEqual([Pattern(b"!c.c")], list(filter.find_matching(b"c.c")))
+ self.assertEqual([], list(filter.find_matching(b"d.c")))
+
+ def test_include_exclude_include(self):
+ filter = IgnoreFilter([b"a.c", b"!a.c", b"a.c"])
+ self.assertTrue(filter.is_ignored(b"a.c"))
+ self.assertEqual(
+ [Pattern(b"a.c"), Pattern(b"!a.c"), Pattern(b"a.c")],
+ list(filter.find_matching(b"a.c")),
+ )
+
+ def test_manpage(self):
+ # A specific example from the gitignore manpage
+ filter = IgnoreFilter([b"/*", b"!/foo", b"/foo/*", b"!/foo/bar"])
+ self.assertTrue(filter.is_ignored(b"a.c"))
+ self.assertTrue(filter.is_ignored(b"foo/blie"))
+ self.assertFalse(filter.is_ignored(b"foo"))
+ self.assertFalse(filter.is_ignored(b"foo/bar"))
+ self.assertFalse(filter.is_ignored(b"foo/bar/"))
+ self.assertFalse(filter.is_ignored(b"foo/bar/bloe"))
+
+ def test_regex_special(self):
+ # See https://github.com/dulwich/dulwich/issues/930#issuecomment-1026166429
+ filter = IgnoreFilter([b"/foo\\[bar\\]", b"/foo"])
+ self.assertTrue(filter.is_ignored("foo"))
+ self.assertTrue(filter.is_ignored("foo[bar]"))
+
+
+class IgnoreFilterStackTests(TestCase):
+ def test_stack_first(self):
+ filter1 = IgnoreFilter([b"[a].c", b"[b].c", b"![d].c"])
+ filter2 = IgnoreFilter([b"[a].c", b"![b],c", b"[c].c", b"[d].c"])
+ stack = IgnoreFilterStack([filter1, filter2])
+ self.assertIs(True, stack.is_ignored(b"a.c"))
+ self.assertIs(True, stack.is_ignored(b"b.c"))
+ self.assertIs(True, stack.is_ignored(b"c.c"))
+ self.assertIs(False, stack.is_ignored(b"d.c"))
+ self.assertIs(None, stack.is_ignored(b"e.c"))
+
+
+class IgnoreFilterManagerTests(TestCase):
+ def test_load_ignore(self):
+ tmp_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ repo = Repo.init(tmp_dir)
+ with open(os.path.join(repo.path, ".gitignore"), "wb") as f:
+ f.write(b"/foo/bar\n")
+ f.write(b"/dir2\n")
+ f.write(b"/dir3/\n")
+ os.mkdir(os.path.join(repo.path, "dir"))
+ with open(os.path.join(repo.path, "dir", ".gitignore"), "wb") as f:
+ f.write(b"/blie\n")
+ with open(os.path.join(repo.path, "dir", "blie"), "wb") as f:
+ f.write(b"IGNORED")
+ p = os.path.join(repo.controldir(), "info", "exclude")
+ with open(p, "wb") as f:
+ f.write(b"/excluded\n")
+ m = IgnoreFilterManager.from_repo(repo)
+ self.assertTrue(m.is_ignored("dir/blie"))
+ self.assertIs(None, m.is_ignored(os.path.join("dir", "bloe")))
+ self.assertIs(None, m.is_ignored("dir"))
+ self.assertTrue(m.is_ignored(os.path.join("foo", "bar")))
+ self.assertTrue(m.is_ignored(os.path.join("excluded")))
+ self.assertTrue(m.is_ignored(os.path.join("dir2", "fileinignoreddir")))
+ self.assertFalse(m.is_ignored("dir3"))
+ self.assertTrue(m.is_ignored("dir3/"))
+ self.assertTrue(m.is_ignored("dir3/bla"))
+
+ def test_nested_gitignores(self):
+ tmp_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ repo = Repo.init(tmp_dir)
+
+ with open(os.path.join(repo.path, ".gitignore"), "wb") as f:
+ f.write(b"/*\n")
+ f.write(b"!/foo\n")
+
+ os.mkdir(os.path.join(repo.path, "foo"))
+ with open(os.path.join(repo.path, "foo", ".gitignore"), "wb") as f:
+ f.write(b"/bar\n")
+
+ with open(os.path.join(repo.path, "foo", "bar"), "wb") as f:
+ f.write(b"IGNORED")
+
+ m = IgnoreFilterManager.from_repo(repo)
+ self.assertTrue(m.is_ignored("foo/bar"))
+
+ def test_load_ignore_ignorecase(self):
+ tmp_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ repo = Repo.init(tmp_dir)
+ config = repo.get_config()
+ config.set(b"core", b"ignorecase", True)
+ config.write_to_path()
+ with open(os.path.join(repo.path, ".gitignore"), "wb") as f:
+ f.write(b"/foo/bar\n")
+ f.write(b"/dir\n")
+ m = IgnoreFilterManager.from_repo(repo)
+ self.assertTrue(m.is_ignored(os.path.join("dir", "blie")))
+ self.assertTrue(m.is_ignored(os.path.join("DIR", "blie")))
+
+ def test_ignored_contents(self):
+ tmp_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ repo = Repo.init(tmp_dir)
+ with open(os.path.join(repo.path, ".gitignore"), "wb") as f:
+ f.write(b"a/*\n")
+ f.write(b"!a/*.txt\n")
+ m = IgnoreFilterManager.from_repo(repo)
+ os.mkdir(os.path.join(repo.path, "a"))
+ self.assertIs(None, m.is_ignored("a"))
+ self.assertIs(None, m.is_ignored("a/"))
+ self.assertFalse(m.is_ignored("a/b.txt"))
+ self.assertTrue(m.is_ignored("a/c.dat"))
blob - /dev/null
blob + 7768773cd139a4c80118f495613757b061f79a99 (mode 644)
--- /dev/null
+++ tests/test_index.py
+# test_index.py -- Tests for the git index
+# Copyright (C) 2008-2009 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for the index."""
+
+import os
+import shutil
+import stat
+import struct
+import sys
+import tempfile
+from io import BytesIO
+
+from dulwich.index import (
+ Index,
+ IndexEntry,
+ SerializedIndexEntry,
+ _fs_to_tree_path,
+ _tree_to_fs_path,
+ build_index_from_tree,
+ cleanup_mode,
+ commit_tree,
+ get_unstaged_changes,
+ index_entry_from_stat,
+ read_index,
+ read_index_dict,
+ validate_path_element_default,
+ validate_path_element_ntfs,
+ write_cache_time,
+ write_index,
+ write_index_dict,
+)
+from dulwich.object_store import MemoryObjectStore
+from dulwich.objects import S_IFGITLINK, Blob, Commit, Tree
+from dulwich.repo import Repo
+
+from . import TestCase, skipIf
+
+
+def can_symlink():
+ """Return whether running process can create symlinks."""
+ if sys.platform != "win32":
+ # Platforms other than Windows should allow symlinks without issues.
+ return True
+
+ test_source = tempfile.mkdtemp()
+ test_target = test_source + "can_symlink"
+ try:
+ os.symlink(test_source, test_target)
+ except (NotImplementedError, OSError):
+ return False
+ return True
+
+
+class IndexTestCase(TestCase):
+ datadir = os.path.join(os.path.dirname(__file__), "../testdata/indexes")
+
+ def get_simple_index(self, name):
+ return Index(os.path.join(self.datadir, name))
+
+
+class SimpleIndexTestCase(IndexTestCase):
+ def test_len(self):
+ self.assertEqual(1, len(self.get_simple_index("index")))
+
+ def test_iter(self):
+ self.assertEqual([b"bla"], list(self.get_simple_index("index")))
+
+ def test_iterobjects(self):
+ self.assertEqual(
+ [(b"bla", b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", 33188)],
+ list(self.get_simple_index("index").iterobjects()),
+ )
+
+ def test_getitem(self):
+ self.assertEqual(
+ IndexEntry(
+ (1230680220, 0),
+ (1230680220, 0),
+ 2050,
+ 3761020,
+ 33188,
+ 1000,
+ 1000,
+ 0,
+ b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
+ ),
+ self.get_simple_index("index")[b"bla"],
+ )
+
+ def test_empty(self):
+ i = self.get_simple_index("notanindex")
+ self.assertEqual(0, len(i))
+ self.assertFalse(os.path.exists(i._filename))
+
+ def test_against_empty_tree(self):
+ i = self.get_simple_index("index")
+ changes = list(i.changes_from_tree(MemoryObjectStore(), None))
+ self.assertEqual(1, len(changes))
+ (oldname, newname), (oldmode, newmode), (oldsha, newsha) = changes[0]
+ self.assertEqual(b"bla", newname)
+ self.assertEqual(b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", newsha)
+
+
+class SimpleIndexWriterTestCase(IndexTestCase):
+ def setUp(self):
+ IndexTestCase.setUp(self)
+ self.tempdir = tempfile.mkdtemp()
+
+ def tearDown(self):
+ IndexTestCase.tearDown(self)
+ shutil.rmtree(self.tempdir)
+
+ def test_simple_write(self):
+ entries = [
+ (
+ SerializedIndexEntry(
+ b"barbla",
+ (1230680220, 0),
+ (1230680220, 0),
+ 2050,
+ 3761020,
+ 33188,
+ 1000,
+ 1000,
+ 0,
+ b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
+ 0,
+ 0,
+ )
+ )
+ ]
+ filename = os.path.join(self.tempdir, "test-simple-write-index")
+ with open(filename, "wb+") as x:
+ write_index(x, entries)
+
+ with open(filename, "rb") as x:
+ self.assertEqual(entries, list(read_index(x)))
+
+
+class ReadIndexDictTests(IndexTestCase):
+ def setUp(self):
+ IndexTestCase.setUp(self)
+ self.tempdir = tempfile.mkdtemp()
+
+ def tearDown(self):
+ IndexTestCase.tearDown(self)
+ shutil.rmtree(self.tempdir)
+
+ def test_simple_write(self):
+ entries = {
+ b"barbla": IndexEntry(
+ (1230680220, 0),
+ (1230680220, 0),
+ 2050,
+ 3761020,
+ 33188,
+ 1000,
+ 1000,
+ 0,
+ b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
+ )
+ }
+ filename = os.path.join(self.tempdir, "test-simple-write-index")
+ with open(filename, "wb+") as x:
+ write_index_dict(x, entries)
+
+ with open(filename, "rb") as x:
+ self.assertEqual(entries, read_index_dict(x))
+
+
+class CommitTreeTests(TestCase):
+ def setUp(self):
+ super().setUp()
+ self.store = MemoryObjectStore()
+
+ def test_single_blob(self):
+ blob = Blob()
+ blob.data = b"foo"
+ self.store.add_object(blob)
+ blobs = [(b"bla", blob.id, stat.S_IFREG)]
+ rootid = commit_tree(self.store, blobs)
+ self.assertEqual(rootid, b"1a1e80437220f9312e855c37ac4398b68e5c1d50")
+ self.assertEqual((stat.S_IFREG, blob.id), self.store[rootid][b"bla"])
+ self.assertEqual({rootid, blob.id}, set(self.store._data.keys()))
+
+ def test_nested(self):
+ blob = Blob()
+ blob.data = b"foo"
+ self.store.add_object(blob)
+ blobs = [(b"bla/bar", blob.id, stat.S_IFREG)]
+ rootid = commit_tree(self.store, blobs)
+ self.assertEqual(rootid, b"d92b959b216ad0d044671981196781b3258fa537")
+ dirid = self.store[rootid][b"bla"][1]
+ self.assertEqual(dirid, b"c1a1deb9788150829579a8b4efa6311e7b638650")
+ self.assertEqual((stat.S_IFDIR, dirid), self.store[rootid][b"bla"])
+ self.assertEqual((stat.S_IFREG, blob.id), self.store[dirid][b"bar"])
+ self.assertEqual({rootid, dirid, blob.id}, set(self.store._data.keys()))
+
+
+class CleanupModeTests(TestCase):
+ def assertModeEqual(self, expected, got):
+ self.assertEqual(expected, got, f"{expected:o} != {got:o}")
+
+ def test_file(self):
+ self.assertModeEqual(0o100644, cleanup_mode(0o100000))
+
+ def test_executable(self):
+ self.assertModeEqual(0o100755, cleanup_mode(0o100711))
+ self.assertModeEqual(0o100755, cleanup_mode(0o100700))
+
+ def test_symlink(self):
+ self.assertModeEqual(0o120000, cleanup_mode(0o120711))
+
+ def test_dir(self):
+ self.assertModeEqual(0o040000, cleanup_mode(0o40531))
+
+ def test_submodule(self):
+ self.assertModeEqual(0o160000, cleanup_mode(0o160744))
+
+
+class WriteCacheTimeTests(TestCase):
+ def test_write_string(self):
+ f = BytesIO()
+ self.assertRaises(TypeError, write_cache_time, f, "foo")
+
+ def test_write_int(self):
+ f = BytesIO()
+ write_cache_time(f, 434343)
+ self.assertEqual(struct.pack(">LL", 434343, 0), f.getvalue())
+
+ def test_write_tuple(self):
+ f = BytesIO()
+ write_cache_time(f, (434343, 21))
+ self.assertEqual(struct.pack(">LL", 434343, 21), f.getvalue())
+
+ def test_write_float(self):
+ f = BytesIO()
+ write_cache_time(f, 434343.000000021)
+ self.assertEqual(struct.pack(">LL", 434343, 21), f.getvalue())
+
+
+class IndexEntryFromStatTests(TestCase):
+ def test_simple(self):
+ st = os.stat_result(
+ (
+ 16877,
+ 131078,
+ 64769,
+ 154,
+ 1000,
+ 1000,
+ 12288,
+ 1323629595,
+ 1324180496,
+ 1324180496,
+ )
+ )
+ entry = index_entry_from_stat(st, b"22" * 20)
+ self.assertEqual(
+ entry,
+ IndexEntry(
+ 1324180496,
+ 1324180496,
+ 64769,
+ 131078,
+ 16384,
+ 1000,
+ 1000,
+ 12288,
+ b"2222222222222222222222222222222222222222",
+ ),
+ )
+
+ def test_override_mode(self):
+ st = os.stat_result(
+ (
+ stat.S_IFREG + 0o644,
+ 131078,
+ 64769,
+ 154,
+ 1000,
+ 1000,
+ 12288,
+ 1323629595,
+ 1324180496,
+ 1324180496,
+ )
+ )
+ entry = index_entry_from_stat(st, b"22" * 20, mode=stat.S_IFREG + 0o755)
+ self.assertEqual(
+ entry,
+ IndexEntry(
+ 1324180496,
+ 1324180496,
+ 64769,
+ 131078,
+ 33261,
+ 1000,
+ 1000,
+ 12288,
+ b"2222222222222222222222222222222222222222",
+ ),
+ )
+
+
+class BuildIndexTests(TestCase):
+ def assertReasonableIndexEntry(self, index_entry, mode, filesize, sha):
+ self.assertEqual(index_entry.mode, mode) # mode
+ self.assertEqual(index_entry.size, filesize) # filesize
+ self.assertEqual(index_entry.sha, sha) # sha
+
+ def assertFileContents(self, path, contents, symlink=False):
+ if symlink:
+ self.assertEqual(os.readlink(path), contents)
+ else:
+ with open(path, "rb") as f:
+ self.assertEqual(f.read(), contents)
+
+ def test_empty(self):
+ repo_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, repo_dir)
+ with Repo.init(repo_dir) as repo:
+ tree = Tree()
+ repo.object_store.add_object(tree)
+
+ build_index_from_tree(
+ repo.path, repo.index_path(), repo.object_store, tree.id
+ )
+
+ # Verify index entries
+ index = repo.open_index()
+ self.assertEqual(len(index), 0)
+
+ # Verify no files
+ self.assertEqual([".git"], os.listdir(repo.path))
+
+ def test_git_dir(self):
+ repo_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, repo_dir)
+ with Repo.init(repo_dir) as repo:
+ # Populate repo
+ filea = Blob.from_string(b"file a")
+ filee = Blob.from_string(b"d")
+
+ tree = Tree()
+ tree[b".git/a"] = (stat.S_IFREG | 0o644, filea.id)
+ tree[b"c/e"] = (stat.S_IFREG | 0o644, filee.id)
+
+ repo.object_store.add_objects([(o, None) for o in [filea, filee, tree]])
+
+ build_index_from_tree(
+ repo.path, repo.index_path(), repo.object_store, tree.id
+ )
+
+ # Verify index entries
+ index = repo.open_index()
+ self.assertEqual(len(index), 1)
+
+ # filea
+ apath = os.path.join(repo.path, ".git", "a")
+ self.assertFalse(os.path.exists(apath))
+
+ # filee
+ epath = os.path.join(repo.path, "c", "e")
+ self.assertTrue(os.path.exists(epath))
+ self.assertReasonableIndexEntry(
+ index[b"c/e"], stat.S_IFREG | 0o644, 1, filee.id
+ )
+ self.assertFileContents(epath, b"d")
+
+ def test_nonempty(self):
+ repo_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, repo_dir)
+ with Repo.init(repo_dir) as repo:
+ # Populate repo
+ filea = Blob.from_string(b"file a")
+ fileb = Blob.from_string(b"file b")
+ filed = Blob.from_string(b"file d")
+
+ tree = Tree()
+ tree[b"a"] = (stat.S_IFREG | 0o644, filea.id)
+ tree[b"b"] = (stat.S_IFREG | 0o644, fileb.id)
+ tree[b"c/d"] = (stat.S_IFREG | 0o644, filed.id)
+
+ repo.object_store.add_objects(
+ [(o, None) for o in [filea, fileb, filed, tree]]
+ )
+
+ build_index_from_tree(
+ repo.path, repo.index_path(), repo.object_store, tree.id
+ )
+
+ # Verify index entries
+ index = repo.open_index()
+ self.assertEqual(len(index), 3)
+
+ # filea
+ apath = os.path.join(repo.path, "a")
+ self.assertTrue(os.path.exists(apath))
+ self.assertReasonableIndexEntry(
+ index[b"a"], stat.S_IFREG | 0o644, 6, filea.id
+ )
+ self.assertFileContents(apath, b"file a")
+
+ # fileb
+ bpath = os.path.join(repo.path, "b")
+ self.assertTrue(os.path.exists(bpath))
+ self.assertReasonableIndexEntry(
+ index[b"b"], stat.S_IFREG | 0o644, 6, fileb.id
+ )
+ self.assertFileContents(bpath, b"file b")
+
+ # filed
+ dpath = os.path.join(repo.path, "c", "d")
+ self.assertTrue(os.path.exists(dpath))
+ self.assertReasonableIndexEntry(
+ index[b"c/d"], stat.S_IFREG | 0o644, 6, filed.id
+ )
+ self.assertFileContents(dpath, b"file d")
+
+ # Verify no extra files
+ self.assertEqual([".git", "a", "b", "c"], sorted(os.listdir(repo.path)))
+ self.assertEqual(["d"], sorted(os.listdir(os.path.join(repo.path, "c"))))
+
+ @skipIf(not getattr(os, "sync", None), "Requires sync support")
+ def test_norewrite(self):
+ repo_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, repo_dir)
+ with Repo.init(repo_dir) as repo:
+ # Populate repo
+ filea = Blob.from_string(b"file a")
+ filea_path = os.path.join(repo_dir, "a")
+ tree = Tree()
+ tree[b"a"] = (stat.S_IFREG | 0o644, filea.id)
+
+ repo.object_store.add_objects([(o, None) for o in [filea, tree]])
+
+ # First Write
+ build_index_from_tree(
+ repo.path, repo.index_path(), repo.object_store, tree.id
+ )
+ # Use sync as metadata can be cached on some FS
+ os.sync()
+ mtime = os.stat(filea_path).st_mtime
+
+ # Test Rewrite
+ build_index_from_tree(
+ repo.path, repo.index_path(), repo.object_store, tree.id
+ )
+ os.sync()
+ self.assertEqual(mtime, os.stat(filea_path).st_mtime)
+
+ # Modify content
+ with open(filea_path, "wb") as fh:
+ fh.write(b"test a")
+ os.sync()
+ mtime = os.stat(filea_path).st_mtime
+
+ # Test rewrite
+ build_index_from_tree(
+ repo.path, repo.index_path(), repo.object_store, tree.id
+ )
+ os.sync()
+ with open(filea_path, "rb") as fh:
+ self.assertEqual(b"file a", fh.read())
+
+ @skipIf(not can_symlink(), "Requires symlink support")
+ def test_symlink(self):
+ repo_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, repo_dir)
+ with Repo.init(repo_dir) as repo:
+ # Populate repo
+ filed = Blob.from_string(b"file d")
+ filee = Blob.from_string(b"d")
+
+ tree = Tree()
+ tree[b"c/d"] = (stat.S_IFREG | 0o644, filed.id)
+ tree[b"c/e"] = (stat.S_IFLNK, filee.id) # symlink
+
+ repo.object_store.add_objects([(o, None) for o in [filed, filee, tree]])
+
+ build_index_from_tree(
+ repo.path, repo.index_path(), repo.object_store, tree.id
+ )
+
+ # Verify index entries
+ index = repo.open_index()
+
+ # symlink to d
+ epath = os.path.join(repo.path, "c", "e")
+ self.assertTrue(os.path.exists(epath))
+ self.assertReasonableIndexEntry(
+ index[b"c/e"],
+ stat.S_IFLNK,
+ 0 if sys.platform == "win32" else 1,
+ filee.id,
+ )
+ self.assertFileContents(epath, "d", symlink=True)
+
+ def test_no_decode_encode(self):
+ repo_dir = tempfile.mkdtemp()
+ repo_dir_bytes = os.fsencode(repo_dir)
+ self.addCleanup(shutil.rmtree, repo_dir)
+ with Repo.init(repo_dir) as repo:
+ # Populate repo
+ file = Blob.from_string(b"foo")
+
+ tree = Tree()
+ latin1_name = "À".encode("latin1")
+ latin1_path = os.path.join(repo_dir_bytes, latin1_name)
+ utf8_name = "À".encode()
+ utf8_path = os.path.join(repo_dir_bytes, utf8_name)
+ tree[latin1_name] = (stat.S_IFREG | 0o644, file.id)
+ tree[utf8_name] = (stat.S_IFREG | 0o644, file.id)
+
+ repo.object_store.add_objects([(o, None) for o in [file, tree]])
+
+ try:
+ build_index_from_tree(
+ repo.path, repo.index_path(), repo.object_store, tree.id
+ )
+ except OSError as e:
+ if e.errno == 92 and sys.platform == "darwin":
+ # Our filename isn't supported by the platform :(
+ self.skipTest("can not write filename %r" % e.filename)
+ else:
+ raise
+ except UnicodeDecodeError:
+ # This happens e.g. with python3.6 on Windows.
+ # It implicitly decodes using utf8, which doesn't work.
+ self.skipTest("can not implicitly convert as utf8")
+
+ # Verify index entries
+ index = repo.open_index()
+ self.assertIn(latin1_name, index)
+ self.assertIn(utf8_name, index)
+
+ self.assertTrue(os.path.exists(latin1_path))
+
+ self.assertTrue(os.path.exists(utf8_path))
+
+ def test_git_submodule(self):
+ repo_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, repo_dir)
+ with Repo.init(repo_dir) as repo:
+ filea = Blob.from_string(b"file alalala")
+
+ subtree = Tree()
+ subtree[b"a"] = (stat.S_IFREG | 0o644, filea.id)
+
+ c = Commit()
+ c.tree = subtree.id
+ c.committer = c.author = b"Somebody <somebody@example.com>"
+ c.commit_time = c.author_time = 42342
+ c.commit_timezone = c.author_timezone = 0
+ c.parents = []
+ c.message = b"Subcommit"
+
+ tree = Tree()
+ tree[b"c"] = (S_IFGITLINK, c.id)
+
+ repo.object_store.add_objects([(o, None) for o in [tree]])
+
+ build_index_from_tree(
+ repo.path, repo.index_path(), repo.object_store, tree.id
+ )
+
+ # Verify index entries
+ index = repo.open_index()
+ self.assertEqual(len(index), 1)
+
+ # filea
+ apath = os.path.join(repo.path, "c/a")
+ self.assertFalse(os.path.exists(apath))
+
+ # dir c
+ cpath = os.path.join(repo.path, "c")
+ self.assertTrue(os.path.isdir(cpath))
+ self.assertEqual(index[b"c"].mode, S_IFGITLINK) # mode
+ self.assertEqual(index[b"c"].sha, c.id) # sha
+
+ def test_git_submodule_exists(self):
+ repo_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, repo_dir)
+ with Repo.init(repo_dir) as repo:
+ filea = Blob.from_string(b"file alalala")
+
+ subtree = Tree()
+ subtree[b"a"] = (stat.S_IFREG | 0o644, filea.id)
+
+ c = Commit()
+ c.tree = subtree.id
+ c.committer = c.author = b"Somebody <somebody@example.com>"
+ c.commit_time = c.author_time = 42342
+ c.commit_timezone = c.author_timezone = 0
+ c.parents = []
+ c.message = b"Subcommit"
+
+ tree = Tree()
+ tree[b"c"] = (S_IFGITLINK, c.id)
+
+ os.mkdir(os.path.join(repo_dir, "c"))
+ repo.object_store.add_objects([(o, None) for o in [tree]])
+
+ build_index_from_tree(
+ repo.path, repo.index_path(), repo.object_store, tree.id
+ )
+
+ # Verify index entries
+ index = repo.open_index()
+ self.assertEqual(len(index), 1)
+
+ # filea
+ apath = os.path.join(repo.path, "c/a")
+ self.assertFalse(os.path.exists(apath))
+
+ # dir c
+ cpath = os.path.join(repo.path, "c")
+ self.assertTrue(os.path.isdir(cpath))
+ self.assertEqual(index[b"c"].mode, S_IFGITLINK) # mode
+ self.assertEqual(index[b"c"].sha, c.id) # sha
+
+
+class GetUnstagedChangesTests(TestCase):
+ def test_get_unstaged_changes(self):
+ """Unit test for get_unstaged_changes."""
+ repo_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, repo_dir)
+ with Repo.init(repo_dir) as repo:
+ # Commit a dummy file then modify it
+ foo1_fullpath = os.path.join(repo_dir, "foo1")
+ with open(foo1_fullpath, "wb") as f:
+ f.write(b"origstuff")
+
+ foo2_fullpath = os.path.join(repo_dir, "foo2")
+ with open(foo2_fullpath, "wb") as f:
+ f.write(b"origstuff")
+
+ repo.stage(["foo1", "foo2"])
+ repo.do_commit(
+ b"test status",
+ author=b"author <email>",
+ committer=b"committer <email>",
+ )
+
+ with open(foo1_fullpath, "wb") as f:
+ f.write(b"newstuff")
+
+ # modify access and modify time of path
+ os.utime(foo1_fullpath, (0, 0))
+
+ changes = get_unstaged_changes(repo.open_index(), repo_dir)
+
+ self.assertEqual(list(changes), [b"foo1"])
+
+ def test_get_unstaged_deleted_changes(self):
+ """Unit test for get_unstaged_changes."""
+ repo_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, repo_dir)
+ with Repo.init(repo_dir) as repo:
+ # Commit a dummy file then remove it
+ foo1_fullpath = os.path.join(repo_dir, "foo1")
+ with open(foo1_fullpath, "wb") as f:
+ f.write(b"origstuff")
+
+ repo.stage(["foo1"])
+ repo.do_commit(
+ b"test status",
+ author=b"author <email>",
+ committer=b"committer <email>",
+ )
+
+ os.unlink(foo1_fullpath)
+
+ changes = get_unstaged_changes(repo.open_index(), repo_dir)
+
+ self.assertEqual(list(changes), [b"foo1"])
+
+ def test_get_unstaged_changes_removed_replaced_by_directory(self):
+ """Unit test for get_unstaged_changes."""
+ repo_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, repo_dir)
+ with Repo.init(repo_dir) as repo:
+ # Commit a dummy file then modify it
+ foo1_fullpath = os.path.join(repo_dir, "foo1")
+ with open(foo1_fullpath, "wb") as f:
+ f.write(b"origstuff")
+
+ repo.stage(["foo1"])
+ repo.do_commit(
+ b"test status",
+ author=b"author <email>",
+ committer=b"committer <email>",
+ )
+
+ os.remove(foo1_fullpath)
+ os.mkdir(foo1_fullpath)
+
+ changes = get_unstaged_changes(repo.open_index(), repo_dir)
+
+ self.assertEqual(list(changes), [b"foo1"])
+
+ @skipIf(not can_symlink(), "Requires symlink support")
+ def test_get_unstaged_changes_removed_replaced_by_link(self):
+ """Unit test for get_unstaged_changes."""
+ repo_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, repo_dir)
+ with Repo.init(repo_dir) as repo:
+ # Commit a dummy file then modify it
+ foo1_fullpath = os.path.join(repo_dir, "foo1")
+ with open(foo1_fullpath, "wb") as f:
+ f.write(b"origstuff")
+
+ repo.stage(["foo1"])
+ repo.do_commit(
+ b"test status",
+ author=b"author <email>",
+ committer=b"committer <email>",
+ )
+
+ os.remove(foo1_fullpath)
+ os.symlink(os.path.dirname(foo1_fullpath), foo1_fullpath)
+
+ changes = get_unstaged_changes(repo.open_index(), repo_dir)
+
+ self.assertEqual(list(changes), [b"foo1"])
+
+
+class TestValidatePathElement(TestCase):
+ def test_default(self):
+ self.assertTrue(validate_path_element_default(b"bla"))
+ self.assertTrue(validate_path_element_default(b".bla"))
+ self.assertFalse(validate_path_element_default(b".git"))
+ self.assertFalse(validate_path_element_default(b".giT"))
+ self.assertFalse(validate_path_element_default(b".."))
+ self.assertTrue(validate_path_element_default(b"git~1"))
+
+ def test_ntfs(self):
+ self.assertTrue(validate_path_element_ntfs(b"bla"))
+ self.assertTrue(validate_path_element_ntfs(b".bla"))
+ self.assertFalse(validate_path_element_ntfs(b".git"))
+ self.assertFalse(validate_path_element_ntfs(b".giT"))
+ self.assertFalse(validate_path_element_ntfs(b".."))
+ self.assertFalse(validate_path_element_ntfs(b"git~1"))
+
+
+class TestTreeFSPathConversion(TestCase):
+ def test_tree_to_fs_path(self):
+ tree_path = "délwíçh/foo".encode()
+ fs_path = _tree_to_fs_path(b"/prefix/path", tree_path)
+ self.assertEqual(
+ fs_path,
+ os.fsencode(os.path.join("/prefix/path", "délwíçh", "foo")),
+ )
+
+ def test_fs_to_tree_path_str(self):
+ fs_path = os.path.join(os.path.join("délwíçh", "foo"))
+ tree_path = _fs_to_tree_path(fs_path)
+ self.assertEqual(tree_path, "délwíçh/foo".encode())
+
+ def test_fs_to_tree_path_bytes(self):
+ fs_path = os.path.join(os.fsencode(os.path.join("délwíçh", "foo")))
+ tree_path = _fs_to_tree_path(fs_path)
+ self.assertEqual(tree_path, "délwíçh/foo".encode())
blob - /dev/null
blob + 484b666d83887c12b08565cec57d64d867731ef5 (mode 644)
--- /dev/null
+++ tests/test_lfs.py
+# test_lfs.py -- tests for LFS
+# Copyright (C) 2020 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for LFS support."""
+
+import shutil
+import tempfile
+
+from dulwich.lfs import LFSStore
+
+from . import TestCase
+
+
+class LFSTests(TestCase):
+ def setUp(self):
+ super().setUp()
+ self.test_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, self.test_dir)
+ self.lfs = LFSStore.create(self.test_dir)
+
+ def test_create(self):
+ sha = self.lfs.write_object([b"a", b"b"])
+ with self.lfs.open_object(sha) as f:
+ self.assertEqual(b"ab", f.read())
+
+ def test_missing(self):
+ self.assertRaises(KeyError, self.lfs.open_object, "abcdeabcdeabcdeabcde")
blob - /dev/null
blob + 79254ebda150c6a4fee4d98aee78a15e38241a08 (mode 644)
--- /dev/null
+++ tests/test_line_ending.py
+# test_line_ending.py -- Tests for the line ending functions
+# Copyright (C) 2018-2019 Boris Feld <boris.feld@comet.ml>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for the line ending conversion."""
+
+from dulwich.line_ending import (
+ convert_crlf_to_lf,
+ convert_lf_to_crlf,
+ get_checkin_filter_autocrlf,
+ get_checkout_filter_autocrlf,
+ normalize_blob,
+)
+from dulwich.objects import Blob
+
+from . import TestCase
+
+
+class LineEndingConversion(TestCase):
+ """Test the line ending conversion functions in various cases."""
+
+ def test_convert_crlf_to_lf_no_op(self):
+ self.assertEqual(convert_crlf_to_lf(b"foobar"), b"foobar")
+
+ def test_convert_crlf_to_lf(self):
+ self.assertEqual(convert_crlf_to_lf(b"line1\r\nline2"), b"line1\nline2")
+
+ def test_convert_crlf_to_lf_mixed(self):
+ self.assertEqual(convert_crlf_to_lf(b"line1\r\n\nline2"), b"line1\n\nline2")
+
+ def test_convert_lf_to_crlf_no_op(self):
+ self.assertEqual(convert_lf_to_crlf(b"foobar"), b"foobar")
+
+ def test_convert_lf_to_crlf(self):
+ self.assertEqual(convert_lf_to_crlf(b"line1\nline2"), b"line1\r\nline2")
+
+ def test_convert_lf_to_crlf_mixed(self):
+ self.assertEqual(convert_lf_to_crlf(b"line1\r\n\nline2"), b"line1\r\n\r\nline2")
+
+
+class GetLineEndingAutocrlfFilters(TestCase):
+ def test_get_checkin_filter_autocrlf_default(self):
+ checkin_filter = get_checkin_filter_autocrlf(b"false")
+
+ self.assertEqual(checkin_filter, None)
+
+ def test_get_checkin_filter_autocrlf_true(self):
+ checkin_filter = get_checkin_filter_autocrlf(b"true")
+
+ self.assertEqual(checkin_filter, convert_crlf_to_lf)
+
+ def test_get_checkin_filter_autocrlf_input(self):
+ checkin_filter = get_checkin_filter_autocrlf(b"input")
+
+ self.assertEqual(checkin_filter, convert_crlf_to_lf)
+
+ def test_get_checkout_filter_autocrlf_default(self):
+ checkout_filter = get_checkout_filter_autocrlf(b"false")
+
+ self.assertEqual(checkout_filter, None)
+
+ def test_get_checkout_filter_autocrlf_true(self):
+ checkout_filter = get_checkout_filter_autocrlf(b"true")
+
+ self.assertEqual(checkout_filter, convert_lf_to_crlf)
+
+ def test_get_checkout_filter_autocrlf_input(self):
+ checkout_filter = get_checkout_filter_autocrlf(b"input")
+
+ self.assertEqual(checkout_filter, None)
+
+
+class NormalizeBlobTestCase(TestCase):
+ def test_normalize_to_lf_no_op(self):
+ base_content = b"line1\nline2"
+ base_sha = "f8be7bb828880727816015d21abcbc37d033f233"
+
+ base_blob = Blob()
+ base_blob.set_raw_string(base_content)
+
+ self.assertEqual(base_blob.as_raw_chunks(), [base_content])
+ self.assertEqual(base_blob.sha().hexdigest(), base_sha)
+
+ filtered_blob = normalize_blob(
+ base_blob, convert_crlf_to_lf, binary_detection=False
+ )
+
+ self.assertEqual(filtered_blob.as_raw_chunks(), [base_content])
+ self.assertEqual(filtered_blob.sha().hexdigest(), base_sha)
+
+ def test_normalize_to_lf(self):
+ base_content = b"line1\r\nline2"
+ base_sha = "3a1bd7a52799fe5cf6411f1d35f4c10bacb1db96"
+
+ base_blob = Blob()
+ base_blob.set_raw_string(base_content)
+
+ self.assertEqual(base_blob.as_raw_chunks(), [base_content])
+ self.assertEqual(base_blob.sha().hexdigest(), base_sha)
+
+ filtered_blob = normalize_blob(
+ base_blob, convert_crlf_to_lf, binary_detection=False
+ )
+
+ normalized_content = b"line1\nline2"
+ normalized_sha = "f8be7bb828880727816015d21abcbc37d033f233"
+
+ self.assertEqual(filtered_blob.as_raw_chunks(), [normalized_content])
+ self.assertEqual(filtered_blob.sha().hexdigest(), normalized_sha)
+
+ def test_normalize_to_lf_binary(self):
+ base_content = b"line1\r\nline2\0"
+ base_sha = "b44504193b765f7cd79673812de8afb55b372ab2"
+
+ base_blob = Blob()
+ base_blob.set_raw_string(base_content)
+
+ self.assertEqual(base_blob.as_raw_chunks(), [base_content])
+ self.assertEqual(base_blob.sha().hexdigest(), base_sha)
+
+ filtered_blob = normalize_blob(
+ base_blob, convert_crlf_to_lf, binary_detection=True
+ )
+
+ self.assertEqual(filtered_blob.as_raw_chunks(), [base_content])
+ self.assertEqual(filtered_blob.sha().hexdigest(), base_sha)
+
+ def test_normalize_to_crlf_no_op(self):
+ base_content = b"line1\r\nline2"
+ base_sha = "3a1bd7a52799fe5cf6411f1d35f4c10bacb1db96"
+
+ base_blob = Blob()
+ base_blob.set_raw_string(base_content)
+
+ self.assertEqual(base_blob.as_raw_chunks(), [base_content])
+ self.assertEqual(base_blob.sha().hexdigest(), base_sha)
+
+ filtered_blob = normalize_blob(
+ base_blob, convert_lf_to_crlf, binary_detection=False
+ )
+
+ self.assertEqual(filtered_blob.as_raw_chunks(), [base_content])
+ self.assertEqual(filtered_blob.sha().hexdigest(), base_sha)
+
+ def test_normalize_to_crlf(self):
+ base_content = b"line1\nline2"
+ base_sha = "f8be7bb828880727816015d21abcbc37d033f233"
+
+ base_blob = Blob()
+ base_blob.set_raw_string(base_content)
+
+ self.assertEqual(base_blob.as_raw_chunks(), [base_content])
+ self.assertEqual(base_blob.sha().hexdigest(), base_sha)
+
+ filtered_blob = normalize_blob(
+ base_blob, convert_lf_to_crlf, binary_detection=False
+ )
+
+ normalized_content = b"line1\r\nline2"
+ normalized_sha = "3a1bd7a52799fe5cf6411f1d35f4c10bacb1db96"
+
+ self.assertEqual(filtered_blob.as_raw_chunks(), [normalized_content])
+ self.assertEqual(filtered_blob.sha().hexdigest(), normalized_sha)
+
+ def test_normalize_to_crlf_binary(self):
+ base_content = b"line1\r\nline2\0"
+ base_sha = "b44504193b765f7cd79673812de8afb55b372ab2"
+
+ base_blob = Blob()
+ base_blob.set_raw_string(base_content)
+
+ self.assertEqual(base_blob.as_raw_chunks(), [base_content])
+ self.assertEqual(base_blob.sha().hexdigest(), base_sha)
+
+ filtered_blob = normalize_blob(
+ base_blob, convert_lf_to_crlf, binary_detection=True
+ )
+
+ self.assertEqual(filtered_blob.as_raw_chunks(), [base_content])
+ self.assertEqual(filtered_blob.sha().hexdigest(), base_sha)
blob - /dev/null
blob + 343892c76692a676e3cd8abcb885ae0753a23c28 (mode 644)
--- /dev/null
+++ tests/test_lru_cache.py
+# Copyright (C) 2006, 2008 Canonical Ltd
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for the lru_cache module."""
+
+from dulwich import lru_cache
+
+from . import TestCase
+
+
+class TestLRUCache(TestCase):
+ """Test that LRU cache properly keeps track of entries."""
+
+ def test_cache_size(self):
+ cache = lru_cache.LRUCache(max_cache=10)
+ self.assertEqual(10, cache.cache_size())
+
+ cache = lru_cache.LRUCache(max_cache=256)
+ self.assertEqual(256, cache.cache_size())
+
+ cache.resize(512)
+ self.assertEqual(512, cache.cache_size())
+
+ def test_missing(self):
+ cache = lru_cache.LRUCache(max_cache=10)
+
+ self.assertNotIn("foo", cache)
+ self.assertRaises(KeyError, cache.__getitem__, "foo")
+
+ cache["foo"] = "bar"
+ self.assertEqual("bar", cache["foo"])
+ self.assertIn("foo", cache)
+ self.assertNotIn("bar", cache)
+
+ def test_map_None(self):
+ # Make sure that we can properly map None as a key.
+ cache = lru_cache.LRUCache(max_cache=10)
+ self.assertNotIn(None, cache)
+ cache[None] = 1
+ self.assertEqual(1, cache[None])
+ cache[None] = 2
+ self.assertEqual(2, cache[None])
+ # Test the various code paths of __getitem__, to make sure that we can
+ # handle when None is the key for the LRU and the MRU
+ cache[1] = 3
+ cache[None] = 1
+ cache[None]
+ cache[1]
+ cache[None]
+ self.assertEqual([None, 1], [n.key for n in cache._walk_lru()])
+
+ def test_add__null_key(self):
+ cache = lru_cache.LRUCache(max_cache=10)
+ self.assertRaises(ValueError, cache.add, lru_cache._null_key, 1)
+
+ def test_overflow(self):
+ """Adding extra entries will pop out old ones."""
+ cache = lru_cache.LRUCache(max_cache=1, after_cleanup_count=1)
+
+ cache["foo"] = "bar"
+ # With a max cache of 1, adding 'baz' should pop out 'foo'
+ cache["baz"] = "biz"
+
+ self.assertNotIn("foo", cache)
+ self.assertIn("baz", cache)
+
+ self.assertEqual("biz", cache["baz"])
+
+ def test_by_usage(self):
+ """Accessing entries bumps them up in priority."""
+ cache = lru_cache.LRUCache(max_cache=2)
+
+ cache["baz"] = "biz"
+ cache["foo"] = "bar"
+
+ self.assertEqual("biz", cache["baz"])
+
+ # This must kick out 'foo' because it was the last accessed
+ cache["nub"] = "in"
+
+ self.assertNotIn("foo", cache)
+
+ def test_cleanup(self):
+ """Test that we can use a cleanup function."""
+ cleanup_called = []
+
+ def cleanup_func(key, val):
+ cleanup_called.append((key, val))
+
+ cache = lru_cache.LRUCache(max_cache=2, after_cleanup_count=2)
+
+ cache.add("baz", "1", cleanup=cleanup_func)
+ cache.add("foo", "2", cleanup=cleanup_func)
+ cache.add("biz", "3", cleanup=cleanup_func)
+
+ self.assertEqual([("baz", "1")], cleanup_called)
+
+ # 'foo' is now most recent, so final cleanup will call it last
+ cache["foo"]
+ cache.clear()
+ self.assertEqual([("baz", "1"), ("biz", "3"), ("foo", "2")], cleanup_called)
+
+ def test_cleanup_on_replace(self):
+ """Replacing an object should cleanup the old value."""
+ cleanup_called = []
+
+ def cleanup_func(key, val):
+ cleanup_called.append((key, val))
+
+ cache = lru_cache.LRUCache(max_cache=2)
+ cache.add(1, 10, cleanup=cleanup_func)
+ cache.add(2, 20, cleanup=cleanup_func)
+ cache.add(2, 25, cleanup=cleanup_func)
+
+ self.assertEqual([(2, 20)], cleanup_called)
+ self.assertEqual(25, cache[2])
+
+ # Even __setitem__ should make sure cleanup() is called
+ cache[2] = 26
+ self.assertEqual([(2, 20), (2, 25)], cleanup_called)
+
+ def test_len(self):
+ cache = lru_cache.LRUCache(max_cache=10, after_cleanup_count=10)
+
+ cache[1] = 10
+ cache[2] = 20
+ cache[3] = 30
+ cache[4] = 40
+
+ self.assertEqual(4, len(cache))
+
+ cache[5] = 50
+ cache[6] = 60
+ cache[7] = 70
+ cache[8] = 80
+
+ self.assertEqual(8, len(cache))
+
+ cache[1] = 15 # replacement
+
+ self.assertEqual(8, len(cache))
+
+ cache[9] = 90
+ cache[10] = 100
+ cache[11] = 110
+
+ # We hit the max
+ self.assertEqual(10, len(cache))
+ self.assertEqual(
+ [11, 10, 9, 1, 8, 7, 6, 5, 4, 3],
+ [n.key for n in cache._walk_lru()],
+ )
+
+ def test_cleanup_shrinks_to_after_clean_count(self):
+ cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=3)
+
+ cache.add(1, 10)
+ cache.add(2, 20)
+ cache.add(3, 25)
+ cache.add(4, 30)
+ cache.add(5, 35)
+
+ self.assertEqual(5, len(cache))
+ # This will bump us over the max, which causes us to shrink down to
+ # after_cleanup_count entries
+ cache.add(6, 40)
+ self.assertEqual(3, len(cache))
+
+ def test_after_cleanup_larger_than_max(self):
+ cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=10)
+ self.assertEqual(5, cache._after_cleanup_count)
+
+ def test_after_cleanup_none(self):
+ cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=None)
+ # By default _after_cleanup_count is 80% of max_cache
+ self.assertEqual(4, cache._after_cleanup_count)
+
+ def test_cleanup_2(self):
+ cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=2)
+
+ # Add these in order
+ cache.add(1, 10)
+ cache.add(2, 20)
+ cache.add(3, 25)
+ cache.add(4, 30)
+ cache.add(5, 35)
+
+ self.assertEqual(5, len(cache))
+ # Force a compaction
+ cache.cleanup()
+ self.assertEqual(2, len(cache))
+
+ def test_preserve_last_access_order(self):
+ cache = lru_cache.LRUCache(max_cache=5)
+
+ # Add these in order
+ cache.add(1, 10)
+ cache.add(2, 20)
+ cache.add(3, 25)
+ cache.add(4, 30)
+ cache.add(5, 35)
+
+ self.assertEqual([5, 4, 3, 2, 1], [n.key for n in cache._walk_lru()])
+
+ # Now access some randomly
+ cache[2]
+ cache[5]
+ cache[3]
+ cache[2]
+ self.assertEqual([2, 3, 5, 4, 1], [n.key for n in cache._walk_lru()])
+
+ def test_get(self):
+ cache = lru_cache.LRUCache(max_cache=5)
+
+ cache.add(1, 10)
+ cache.add(2, 20)
+ self.assertEqual(20, cache.get(2))
+ self.assertEqual(None, cache.get(3))
+ obj = object()
+ self.assertIs(obj, cache.get(3, obj))
+ self.assertEqual([2, 1], [n.key for n in cache._walk_lru()])
+ self.assertEqual(10, cache.get(1))
+ self.assertEqual([1, 2], [n.key for n in cache._walk_lru()])
+
+ def test_keys(self):
+ cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=5)
+
+ cache[1] = 2
+ cache[2] = 3
+ cache[3] = 4
+ self.assertEqual([1, 2, 3], sorted(cache.keys()))
+ cache[4] = 5
+ cache[5] = 6
+ cache[6] = 7
+ self.assertEqual([2, 3, 4, 5, 6], sorted(cache.keys()))
+
+ def test_resize_smaller(self):
+ cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=4)
+ cache[1] = 2
+ cache[2] = 3
+ cache[3] = 4
+ cache[4] = 5
+ cache[5] = 6
+ self.assertEqual([1, 2, 3, 4, 5], sorted(cache.keys()))
+ cache[6] = 7
+ self.assertEqual([3, 4, 5, 6], sorted(cache.keys()))
+ # Now resize to something smaller, which triggers a cleanup
+ cache.resize(max_cache=3, after_cleanup_count=2)
+ self.assertEqual([5, 6], sorted(cache.keys()))
+ # Adding something will use the new size
+ cache[7] = 8
+ self.assertEqual([5, 6, 7], sorted(cache.keys()))
+ cache[8] = 9
+ self.assertEqual([7, 8], sorted(cache.keys()))
+
+ def test_resize_larger(self):
+ cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=4)
+ cache[1] = 2
+ cache[2] = 3
+ cache[3] = 4
+ cache[4] = 5
+ cache[5] = 6
+ self.assertEqual([1, 2, 3, 4, 5], sorted(cache.keys()))
+ cache[6] = 7
+ self.assertEqual([3, 4, 5, 6], sorted(cache.keys()))
+ cache.resize(max_cache=8, after_cleanup_count=6)
+ self.assertEqual([3, 4, 5, 6], sorted(cache.keys()))
+ cache[7] = 8
+ cache[8] = 9
+ cache[9] = 10
+ cache[10] = 11
+ self.assertEqual([3, 4, 5, 6, 7, 8, 9, 10], sorted(cache.keys()))
+ cache[11] = 12 # triggers cleanup back to new after_cleanup_count
+ self.assertEqual([6, 7, 8, 9, 10, 11], sorted(cache.keys()))
+
+
+class TestLRUSizeCache(TestCase):
+ def test_basic_init(self):
+ cache = lru_cache.LRUSizeCache()
+ self.assertEqual(2048, cache._max_cache)
+ self.assertEqual(int(cache._max_size * 0.8), cache._after_cleanup_size)
+ self.assertEqual(0, cache._value_size)
+
+ def test_add__null_key(self):
+ cache = lru_cache.LRUSizeCache()
+ self.assertRaises(ValueError, cache.add, lru_cache._null_key, 1)
+
+ def test_add_tracks_size(self):
+ cache = lru_cache.LRUSizeCache()
+ self.assertEqual(0, cache._value_size)
+ cache.add("my key", "my value text")
+ self.assertEqual(13, cache._value_size)
+
+ def test_remove_tracks_size(self):
+ cache = lru_cache.LRUSizeCache()
+ self.assertEqual(0, cache._value_size)
+ cache.add("my key", "my value text")
+ self.assertEqual(13, cache._value_size)
+ node = cache._cache["my key"]
+ cache._remove_node(node)
+ self.assertEqual(0, cache._value_size)
+
+ def test_no_add_over_size(self):
+ """Adding a large value may not be cached at all."""
+ cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5)
+ self.assertEqual(0, cache._value_size)
+ self.assertEqual({}, cache.items())
+ cache.add("test", "key")
+ self.assertEqual(3, cache._value_size)
+ self.assertEqual({"test": "key"}, cache.items())
+ cache.add("test2", "key that is too big")
+ self.assertEqual(3, cache._value_size)
+ self.assertEqual({"test": "key"}, cache.items())
+ # If adding a key would only trigger a cleanup that removes every
+ # cached entry, then obviously that value should not be stored
+ cache.add("test3", "bigkey")
+ self.assertEqual(3, cache._value_size)
+ self.assertEqual({"test": "key"}, cache.items())
+
+ cache.add("test4", "bikey")
+ self.assertEqual(3, cache._value_size)
+ self.assertEqual({"test": "key"}, cache.items())
+
+ def test_no_add_over_size_cleanup(self):
+ """If a large value is not cached, we will call cleanup right away."""
+ cleanup_calls = []
+
+ def cleanup(key, value):
+ cleanup_calls.append((key, value))
+
+ cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5)
+ self.assertEqual(0, cache._value_size)
+ self.assertEqual({}, cache.items())
+ cache.add("test", "key that is too big", cleanup=cleanup)
+ # key was not added
+ self.assertEqual(0, cache._value_size)
+ self.assertEqual({}, cache.items())
+ # and cleanup was called
+ self.assertEqual([("test", "key that is too big")], cleanup_calls)
+
+ def test_adding_clears_cache_based_on_size(self):
+ """The cache is cleared in LRU order until small enough."""
+ cache = lru_cache.LRUSizeCache(max_size=20)
+ cache.add("key1", "value") # 5 chars
+ cache.add("key2", "value2") # 6 chars
+ cache.add("key3", "value23") # 7 chars
+ self.assertEqual(5 + 6 + 7, cache._value_size)
+ cache["key2"] # reference key2 so it gets a newer reference time
+ cache.add("key4", "value234") # 8 chars, over limit
+ # We have to remove 2 keys to get back under limit
+ self.assertEqual(6 + 8, cache._value_size)
+ self.assertEqual({"key2": "value2", "key4": "value234"}, cache.items())
+
+ def test_adding_clears_to_after_cleanup_size(self):
+ cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10)
+ cache.add("key1", "value") # 5 chars
+ cache.add("key2", "value2") # 6 chars
+ cache.add("key3", "value23") # 7 chars
+ self.assertEqual(5 + 6 + 7, cache._value_size)
+ cache["key2"] # reference key2 so it gets a newer reference time
+ cache.add("key4", "value234") # 8 chars, over limit
+ # We have to remove 3 keys to get back under limit
+ self.assertEqual(8, cache._value_size)
+ self.assertEqual({"key4": "value234"}, cache.items())
+
+ def test_custom_sizes(self):
+ def size_of_list(lst):
+ return sum(len(x) for x in lst)
+
+ cache = lru_cache.LRUSizeCache(
+ max_size=20, after_cleanup_size=10, compute_size=size_of_list
+ )
+
+ cache.add("key1", ["val", "ue"]) # 5 chars
+ cache.add("key2", ["val", "ue2"]) # 6 chars
+ cache.add("key3", ["val", "ue23"]) # 7 chars
+ self.assertEqual(5 + 6 + 7, cache._value_size)
+ cache["key2"] # reference key2 so it gets a newer reference time
+ cache.add("key4", ["value", "234"]) # 8 chars, over limit
+ # We have to remove 3 keys to get back under limit
+ self.assertEqual(8, cache._value_size)
+ self.assertEqual({"key4": ["value", "234"]}, cache.items())
+
+ def test_cleanup(self):
+ cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10)
+
+ # Add these in order
+ cache.add("key1", "value") # 5 chars
+ cache.add("key2", "value2") # 6 chars
+ cache.add("key3", "value23") # 7 chars
+ self.assertEqual(5 + 6 + 7, cache._value_size)
+
+ cache.cleanup()
+ # Only the most recent fits after cleaning up
+ self.assertEqual(7, cache._value_size)
+
+ def test_keys(self):
+ cache = lru_cache.LRUSizeCache(max_size=10)
+
+ cache[1] = "a"
+ cache[2] = "b"
+ cache[3] = "cdef"
+ self.assertEqual([1, 2, 3], sorted(cache.keys()))
+
+ def test_resize_smaller(self):
+ cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=9)
+ cache[1] = "abc"
+ cache[2] = "def"
+ cache[3] = "ghi"
+ cache[4] = "jkl"
+ # Triggers a cleanup
+ self.assertEqual([2, 3, 4], sorted(cache.keys()))
+ # Resize should also cleanup again
+ cache.resize(max_size=6, after_cleanup_size=4)
+ self.assertEqual([4], sorted(cache.keys()))
+ # Adding should use the new max size
+ cache[5] = "mno"
+ self.assertEqual([4, 5], sorted(cache.keys()))
+ cache[6] = "pqr"
+ self.assertEqual([6], sorted(cache.keys()))
+
+ def test_resize_larger(self):
+ cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=9)
+ cache[1] = "abc"
+ cache[2] = "def"
+ cache[3] = "ghi"
+ cache[4] = "jkl"
+ # Triggers a cleanup
+ self.assertEqual([2, 3, 4], sorted(cache.keys()))
+ cache.resize(max_size=15, after_cleanup_size=12)
+ self.assertEqual([2, 3, 4], sorted(cache.keys()))
+ cache[5] = "mno"
+ cache[6] = "pqr"
+ self.assertEqual([2, 3, 4, 5, 6], sorted(cache.keys()))
+ cache[7] = "stu"
+ self.assertEqual([4, 5, 6, 7], sorted(cache.keys()))
blob - /dev/null
blob + 08a882a50924fa6180256562055ed5e6aa8a3e0a (mode 644)
--- /dev/null
+++ tests/test_mailmap.py
+# test_mailmap.py -- Tests for dulwich.mailmap
+# Copyright (C) 2018 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for dulwich.mailmap."""
+
+from io import BytesIO
+from unittest import TestCase
+
+from dulwich.mailmap import Mailmap, read_mailmap
+
+
+class ReadMailmapTests(TestCase):
+ def test_read(self):
+ b = BytesIO(
+ b"""\
+Jane Doe <jane@desktop.(none)>
+Joe R. Developer <joe@example.com>
+# A comment
+<cto@company.xx> <cto@coompany.xx> # Comment
+Some Dude <some@dude.xx> nick1 <bugs@company.xx>
+Other Author <other@author.xx> nick2 <bugs@company.xx>
+Other Author <other@author.xx> <nick2@company.xx>
+Santa Claus <santa.claus@northpole.xx> <me@company.xx>
+"""
+ )
+ self.assertEqual(
+ [
+ ((b"Jane Doe", b"jane@desktop.(none)"), None),
+ ((b"Joe R. Developer", b"joe@example.com"), None),
+ ((None, b"cto@company.xx"), (None, b"cto@coompany.xx")),
+ (
+ (b"Some Dude", b"some@dude.xx"),
+ (b"nick1", b"bugs@company.xx"),
+ ),
+ (
+ (b"Other Author", b"other@author.xx"),
+ (b"nick2", b"bugs@company.xx"),
+ ),
+ (
+ (b"Other Author", b"other@author.xx"),
+ (None, b"nick2@company.xx"),
+ ),
+ (
+ (b"Santa Claus", b"santa.claus@northpole.xx"),
+ (None, b"me@company.xx"),
+ ),
+ ],
+ list(read_mailmap(b)),
+ )
+
+
+class MailmapTests(TestCase):
+ def test_lookup(self):
+ m = Mailmap()
+ m.add_entry((b"Jane Doe", b"jane@desktop.(none)"), (None, None))
+ m.add_entry((b"Joe R. Developer", b"joe@example.com"), None)
+ m.add_entry((None, b"cto@company.xx"), (None, b"cto@coompany.xx"))
+ m.add_entry((b"Some Dude", b"some@dude.xx"), (b"nick1", b"bugs@company.xx"))
+ m.add_entry(
+ (b"Other Author", b"other@author.xx"),
+ (b"nick2", b"bugs@company.xx"),
+ )
+ m.add_entry((b"Other Author", b"other@author.xx"), (None, b"nick2@company.xx"))
+ m.add_entry(
+ (b"Santa Claus", b"santa.claus@northpole.xx"),
+ (None, b"me@company.xx"),
+ )
+ self.assertEqual(
+ b"Jane Doe <jane@desktop.(none)>",
+ m.lookup(b"Jane Doe <jane@desktop.(none)>"),
+ )
+ self.assertEqual(
+ b"Jane Doe <jane@desktop.(none)>",
+ m.lookup(b"Jane Doe <jane@example.com>"),
+ )
+ self.assertEqual(
+ b"Jane Doe <jane@desktop.(none)>",
+ m.lookup(b"Jane D. <jane@desktop.(none)>"),
+ )
+ self.assertEqual(
+ b"Some Dude <some@dude.xx>", m.lookup(b"nick1 <bugs@company.xx>")
+ )
+ self.assertEqual(b"CTO <cto@company.xx>", m.lookup(b"CTO <cto@coompany.xx>"))
blob - /dev/null
blob + 7a6b79b5230c062a11cb89eb3f93c4ad0487879b (mode 644)
--- /dev/null
+++ tests/test_missing_obj_finder.py
+# test_missing_obj_finder.py -- tests for MissingObjectFinder
+# Copyright (C) 2012 syntevo GmbH
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+from dulwich.object_store import MemoryObjectStore, MissingObjectFinder
+from dulwich.objects import Blob
+
+from . import TestCase
+from .utils import build_commit_graph, make_object, make_tag
+
+
+class MissingObjectFinderTest(TestCase):
+    """Shared fixture for MissingObjectFinder tests.
+
+    Provides an in-memory object store, a commit list populated by
+    subclasses via build_commit_graph(), and an exact-match assertion
+    for the set of objects reported missing.
+    """
+
+    def setUp(self):
+        super().setUp()
+        self.store = MemoryObjectStore()
+        self.commits = []
+
+    def cmt(self, n):
+        # 1-based accessor into the commit list built by build_commit_graph().
+        return self.commits[n - 1]
+
+    def assertMissingMatch(self, haves, wants, expected):
+        # Every (sha, path) yielded by the finder must be in `expected`;
+        # each hit is removed so that, at the end, an empty `expected`
+        # proves the reported set matches exactly.  NOTE: `expected` is
+        # mutated in place by this helper.
+        for sha, path in MissingObjectFinder(self.store, haves, wants, shallow=set()):
+            self.assertIn(
+                sha, expected, f"({sha},{path}) erroneously reported as missing"
+            )
+            expected.remove(sha)
+
+        self.assertEqual(
+            len(expected),
+            0,
+            f"some objects are not reported as missing: {expected}",
+        )
+
+
+class MOFLinearRepoTest(MissingObjectFinderTest):
+    """MissingObjectFinder over a linear history 1 -> 2 -> 3."""
+
+    def setUp(self):
+        super().setUp()
+        # present in 1, removed in 3
+        f1_1 = make_object(Blob, data=b"f1")
+        # present in all revisions, changed in 2 and 3
+        f2_1 = make_object(Blob, data=b"f2")
+        f2_2 = make_object(Blob, data=b"f2-changed")
+        f2_3 = make_object(Blob, data=b"f2-changed-again")
+        # added in 2, left unmodified in 3
+        f3_2 = make_object(Blob, data=b"f3")
+
+        commit_spec = [[1], [2, 1], [3, 2]]
+        trees = {
+            1: [(b"f1", f1_1), (b"f2", f2_1)],
+            2: [(b"f1", f1_1), (b"f2", f2_2), (b"f3", f3_2)],
+            3: [(b"f2", f2_3), (b"f3", f3_2)],
+        }
+        # commit 1: f1 and f2
+        # commit 2: f3 added, f2 changed. Missing shall report commit id and a
+        # tree referenced by commit
+        # commit 3: f1 removed, f2 changed. Commit sha and root tree sha shall
+        # be reported as modified
+        self.commits = build_commit_graph(self.store, commit_spec, trees)
+        # Expected missing sets for each have/want pair exercised below.
+        self.missing_1_2 = [self.cmt(2).id, self.cmt(2).tree, f2_2.id, f3_2.id]
+        self.missing_2_3 = [self.cmt(3).id, self.cmt(3).tree, f2_3.id]
+        self.missing_1_3 = [
+            self.cmt(2).id,
+            self.cmt(3).id,
+            self.cmt(2).tree,
+            self.cmt(3).tree,
+            f2_2.id,
+            f3_2.id,
+            f2_3.id,
+        ]
+
+    def test_1_to_2(self):
+        self.assertMissingMatch([self.cmt(1).id], [self.cmt(2).id], self.missing_1_2)
+
+    def test_2_to_3(self):
+        self.assertMissingMatch([self.cmt(2).id], [self.cmt(3).id], self.missing_2_3)
+
+    def test_1_to_3(self):
+        self.assertMissingMatch([self.cmt(1).id], [self.cmt(3).id], self.missing_1_3)
+
+    def test_bogus_haves(self):
+        """Ensure non-existent SHA in haves are tolerated."""
+        # Reversing a valid hex sha yields a well-formed but absent sha.
+        bogus_sha = self.cmt(2).id[::-1]
+        haves = [self.cmt(1).id, bogus_sha]
+        wants = [self.cmt(3).id]
+        self.assertMissingMatch(haves, wants, self.missing_1_3)
+
+    def test_bogus_wants_failure(self):
+        """Ensure non-existent SHA in wants are not tolerated."""
+        bogus_sha = self.cmt(2).id[::-1]
+        haves = [self.cmt(1).id]
+        wants = [self.cmt(3).id, bogus_sha]
+        self.assertRaises(
+            KeyError, MissingObjectFinder, self.store, haves, wants, shallow=set()
+        )
+
+    def test_no_changes(self):
+        # have == want: nothing should be reported missing.
+        self.assertMissingMatch([self.cmt(3).id], [self.cmt(3).id], [])
+
+
+class MOFMergeForkRepoTest(MissingObjectFinderTest):
+    """MissingObjectFinder over a history with a fork (5) and a merge (6)."""
+
+    # 1 --- 2 --- 4 --- 6 --- 7
+    #        \         /
+    #         3 ------
+    #          \
+    #           5
+
+    def setUp(self):
+        super().setUp()
+        f1_1 = make_object(Blob, data=b"f1")
+        f1_2 = make_object(Blob, data=b"f1-2")
+        f1_4 = make_object(Blob, data=b"f1-4")
+        f1_7 = make_object(Blob, data=b"f1-2")  # same data as in rev 2
+        f2_1 = make_object(Blob, data=b"f2")
+        f2_3 = make_object(Blob, data=b"f2-3")
+        f3_3 = make_object(Blob, data=b"f3")
+        f3_5 = make_object(Blob, data=b"f3-5")
+        commit_spec = [[1], [2, 1], [3, 2], [4, 2], [5, 3], [6, 3, 4], [7, 6]]
+        trees = {
+            1: [(b"f1", f1_1), (b"f2", f2_1)],
+            2: [(b"f1", f1_2), (b"f2", f2_1)],  # f1 changed
+            # f3 added, f2 changed
+            3: [(b"f1", f1_2), (b"f2", f2_3), (b"f3", f3_3)],
+            4: [(b"f1", f1_4), (b"f2", f2_1)],  # f1 changed
+            5: [(b"f1", f1_2), (b"f3", f3_5)],  # f2 removed, f3 changed
+            # merged 3 and 4
+            6: [(b"f1", f1_4), (b"f2", f2_3), (b"f3", f3_3)],
+            # f1 changed to match rev2. f3 removed
+            7: [(b"f1", f1_7), (b"f2", f2_3)],
+        }
+        self.commits = build_commit_graph(self.store, commit_spec, trees)
+
+        # Blob ids referenced by the expectations below.
+        self.f1_2_id = f1_2.id
+        self.f1_4_id = f1_4.id
+        self.f1_7_id = f1_7.id
+        self.f2_3_id = f2_3.id
+        self.f3_3_id = f3_3.id
+
+        # f1_7 was built from the same data as f1_2, so the ids must match.
+        self.assertEqual(f1_2.id, f1_7.id, "[sanity]")
+
+    def test_have6_want7(self):
+        # have 6, want 7. Ideally, shall not report f1_7 as it's the same as
+        # f1_2, however, to do so, MissingObjectFinder shall not record trees
+        # of common commits only, but also all parent trees and tree items,
+        # which is an overkill (i.e. in sha_done it records f1_4 as known, and
+        # doesn't record f1_2 was known prior to that, hence can't detect f1_7
+        # is in fact f1_2 and shall not be reported)
+        self.assertMissingMatch(
+            [self.cmt(6).id],
+            [self.cmt(7).id],
+            [self.cmt(7).id, self.cmt(7).tree, self.f1_7_id],
+        )
+
+    def test_have4_want7(self):
+        # have 4, want 7. Shall not include rev5 as it is not in the tree
+        # between 4 and 7 (well, it is, but its SHA's are irrelevant for 4..7
+        # commit hierarchy)
+        self.assertMissingMatch(
+            [self.cmt(4).id],
+            [self.cmt(7).id],
+            [
+                self.cmt(7).id,
+                self.cmt(6).id,
+                self.cmt(3).id,
+                self.cmt(7).tree,
+                self.cmt(6).tree,
+                self.cmt(3).tree,
+                self.f2_3_id,
+                self.f3_3_id,
+            ],
+        )
+
+    def test_have1_want6(self):
+        # have 1, want 6. Shall not include rev5
+        self.assertMissingMatch(
+            [self.cmt(1).id],
+            [self.cmt(6).id],
+            [
+                self.cmt(6).id,
+                self.cmt(4).id,
+                self.cmt(3).id,
+                self.cmt(2).id,
+                self.cmt(6).tree,
+                self.cmt(4).tree,
+                self.cmt(3).tree,
+                self.cmt(2).tree,
+                self.f1_2_id,
+                self.f1_4_id,
+                self.f2_3_id,
+                self.f3_3_id,
+            ],
+        )
+
+    def test_have3_want6(self):
+        # NOTE(review): the method name says "want6" but the body wants
+        # commit 7 — presumably the name is stale; confirm upstream.
+        # have 3, want 7. Shall not report rev2 and its tree, because
+        # haves(3) means has parents, i.e. rev2, too
+        # BUT shall report any changes descending rev2 (excluding rev3)
+        # Shall NOT report f1_7 as it's technically == f1_2
+        self.assertMissingMatch(
+            [self.cmt(3).id],
+            [self.cmt(7).id],
+            [
+                self.cmt(7).id,
+                self.cmt(6).id,
+                self.cmt(4).id,
+                self.cmt(7).tree,
+                self.cmt(6).tree,
+                self.cmt(4).tree,
+                self.f1_4_id,
+            ],
+        )
+
+    def test_have5_want7(self):
+        # have 5, want 7. Common parent is rev2, hence children of rev2 from
+        # a descent line other than rev5 shall be reported
+        # expects f1_4 from rev6. f3_5 is known in rev5;
+        # f1_7 shall be the same as f1_2 (known, too)
+        self.assertMissingMatch(
+            [self.cmt(5).id],
+            [self.cmt(7).id],
+            [
+                self.cmt(7).id,
+                self.cmt(6).id,
+                self.cmt(4).id,
+                self.cmt(7).tree,
+                self.cmt(6).tree,
+                self.cmt(4).tree,
+                self.f1_4_id,
+            ],
+        )
+
+
+class MOFTagsTest(MissingObjectFinderTest):
+    """MissingObjectFinder with tags pointing at commits, tags, trees and blobs."""
+
+    def setUp(self):
+        super().setUp()
+        f1_1 = make_object(Blob, data=b"f1")
+        commit_spec = [[1]]
+        trees = {1: [(b"f1", f1_1)]}
+        self.commits = build_commit_graph(self.store, commit_spec, trees)
+
+        # Tag -> commit 1.
+        self._normal_tag = make_tag(self.cmt(1))
+        self.store.add_object(self._normal_tag)
+
+        # Tag -> tag -> commit 1.
+        self._tag_of_tag = make_tag(self._normal_tag)
+        self.store.add_object(self._tag_of_tag)
+
+        # Tag -> tree of commit 1.
+        self._tag_of_tree = make_tag(self.store[self.cmt(1).tree])
+        self.store.add_object(self._tag_of_tree)
+
+        # Tag -> blob.
+        self._tag_of_blob = make_tag(f1_1)
+        self.store.add_object(self._tag_of_blob)
+
+        # Tag -> tag -> blob.
+        self._tag_of_tag_of_blob = make_tag(self._tag_of_blob)
+        self.store.add_object(self._tag_of_tag_of_blob)
+
+        self.f1_1_id = f1_1.id
+
+    def test_tagged_commit(self):
+        # The user already has the tagged commit, all they want is the tag,
+        # so send them only the tag object.
+        self.assertMissingMatch(
+            [self.cmt(1).id], [self._normal_tag.id], [self._normal_tag.id]
+        )
+
+    # The remaining cases are unusual, but do happen in the wild.
+    def test_tagged_tag(self):
+        # User already has tagged tag, send only tag of tag
+        self.assertMissingMatch(
+            [self._normal_tag.id], [self._tag_of_tag.id], [self._tag_of_tag.id]
+        )
+        # User needs both tags, but already has commit
+        self.assertMissingMatch(
+            [self.cmt(1).id],
+            [self._tag_of_tag.id],
+            [self._normal_tag.id, self._tag_of_tag.id],
+        )
+
+    def test_tagged_tree(self):
+        # With no haves, the tag, the tree and its blob are all missing.
+        self.assertMissingMatch(
+            [],
+            [self._tag_of_tree.id],
+            [self._tag_of_tree.id, self.cmt(1).tree, self.f1_1_id],
+        )
+
+    def test_tagged_blob(self):
+        self.assertMissingMatch(
+            [], [self._tag_of_blob.id], [self._tag_of_blob.id, self.f1_1_id]
+        )
+
+    def test_tagged_tagged_blob(self):
+        self.assertMissingMatch(
+            [],
+            [self._tag_of_tag_of_blob.id],
+            [self._tag_of_tag_of_blob.id, self._tag_of_blob.id, self.f1_1_id],
+        )
blob - /dev/null
blob + b9d78e8aa227c57b0163118e3a4690b9e562d896 (mode 644)
--- /dev/null
+++ tests/test_object_store.py
+# test_object_store.py -- tests for object_store.py
+# Copyright (C) 2008 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for the object store interface."""
+
+import os
+import shutil
+import stat
+import sys
+import tempfile
+from contextlib import closing
+from io import BytesIO
+from unittest import skipUnless
+
+from dulwich.errors import NotTreeError
+from dulwich.index import commit_tree
+from dulwich.object_store import (
+ DiskObjectStore,
+ MemoryObjectStore,
+ ObjectStoreGraphWalker,
+ OverlayObjectStore,
+ commit_tree_changes,
+ iter_tree_contents,
+ peel_sha,
+ read_packs_file,
+ tree_lookup_path,
+)
+from dulwich.objects import (
+ S_IFGITLINK,
+ Blob,
+ EmptyFileException,
+ SubmoduleEncountered,
+ Tree,
+ TreeEntry,
+ sha_to_hex,
+)
+from dulwich.pack import REF_DELTA, write_pack_objects
+from dulwich.protocol import DEPTH_INFINITE
+
+from . import TestCase
+from .utils import build_pack, make_object, make_tag
+
+# mock.patch is optional; tests needing it are skipped when unavailable
+# (see the skipUnless decorators below).
+try:
+    from unittest.mock import patch
+except ImportError:
+    patch = None  # type: ignore
+
+
+# Shared blob fixture used by the store test mixins below.
+testobject = make_object(Blob, data=b"yummy data")
+
+
+class ObjectStoreTests:
+    """Behavior tests common to all object store implementations.
+
+    This is a mixin (it does not inherit TestCase itself); concrete
+    subclasses below combine it with TestCase and provide ``self.store``.
+    """
+
+    def test_determine_wants_all(self):
+        self.assertEqual(
+            [b"1" * 40],
+            self.store.determine_wants_all({b"refs/heads/foo": b"1" * 40}),
+        )
+
+    def test_determine_wants_all_zero(self):
+        # The all-zero sha (deleted ref) must never be wanted.
+        self.assertEqual(
+            [], self.store.determine_wants_all({b"refs/heads/foo": b"0" * 40})
+        )
+
+    @skipUnless(patch, "Required mock.patch")
+    def test_determine_wants_all_depth(self):
+        self.store.add_object(testobject)
+        refs = {b"refs/heads/foo": testobject.id}
+        # _get_depth is stubbed to 1 so we can observe exactly when the
+        # depth computation is consulted.
+        with patch.object(self.store, "_get_depth", return_value=1) as m:
+            self.assertEqual([], self.store.determine_wants_all(refs, depth=0))
+            self.assertEqual(
+                [testobject.id],
+                self.store.determine_wants_all(refs, depth=DEPTH_INFINITE),
+            )
+            # depth=0 and infinite depth must not need a depth lookup.
+            m.assert_not_called()
+
+            self.assertEqual([], self.store.determine_wants_all(refs, depth=1))
+            m.assert_called_with(testobject.id)
+            self.assertEqual(
+                [testobject.id], self.store.determine_wants_all(refs, depth=2)
+            )
+
+    def test_get_depth(self):
+        # Unknown object: depth 0.
+        self.assertEqual(0, self.store._get_depth(testobject.id))
+
+        self.store.add_object(testobject)
+        self.assertEqual(
+            1, self.store._get_depth(testobject.id, get_parents=lambda x: [])
+        )
+
+        parent = make_object(Blob, data=b"parent data")
+        self.store.add_object(parent)
+        # One synthetic parent level -> depth 2.
+        self.assertEqual(
+            2,
+            self.store._get_depth(
+                testobject.id,
+                get_parents=lambda x: [parent.id] if x == testobject else [],
+            ),
+        )
+
+    def test_iter(self):
+        self.assertEqual([], list(self.store))
+
+    def test_get_nonexistant(self):
+        # NOTE(review): "nonexistant" is a typo for "nonexistent" in the name.
+        self.assertRaises(KeyError, lambda: self.store[b"a" * 40])
+
+    def test_contains_nonexistant(self):
+        self.assertNotIn(b"a" * 40, self.store)
+
+    def test_add_objects_empty(self):
+        self.store.add_objects([])
+
+    def test_add_commit(self):
+        # TODO: Argh, no way to construct Git commit objects without
+        # access to a serialized form.
+        self.store.add_objects([])
+
+    def test_store_resilience(self):
+        """Test if updating an existing stored object doesn't erase the
+        object from the store.
+        """
+        test_object = make_object(Blob, data=b"data")
+
+        self.store.add_object(test_object)
+        test_object_id = test_object.id
+        # Mutating the in-memory object changes its id ...
+        test_object.data = test_object.data + b"update"
+        stored_test_object = self.store[test_object_id]
+
+        # ... but the store must still hold the original under the old id.
+        self.assertNotEqual(test_object.id, stored_test_object.id)
+        self.assertEqual(stored_test_object.id, test_object_id)
+
+    def test_add_object(self):
+        self.store.add_object(testobject)
+        self.assertEqual({testobject.id}, set(self.store))
+        self.assertIn(testobject.id, self.store)
+        r = self.store[testobject.id]
+        self.assertEqual(r, testobject)
+
+    def test_add_objects(self):
+        data = [(testobject, "mypath")]
+        self.store.add_objects(data)
+        self.assertEqual({testobject.id}, set(self.store))
+        self.assertIn(testobject.id, self.store)
+        r = self.store[testobject.id]
+        self.assertEqual(r, testobject)
+
+    def test_tree_changes(self):
+        blob_a1 = make_object(Blob, data=b"a1")
+        blob_a2 = make_object(Blob, data=b"a2")
+        blob_b = make_object(Blob, data=b"b")
+        for blob in [blob_a1, blob_a2, blob_b]:
+            self.store.add_object(blob)
+
+        # Two trees differing only in the contents of "a".
+        blobs_1 = [(b"a", blob_a1.id, 0o100644), (b"b", blob_b.id, 0o100644)]
+        tree1_id = commit_tree(self.store, blobs_1)
+        blobs_2 = [(b"a", blob_a2.id, 0o100644), (b"b", blob_b.id, 0o100644)]
+        tree2_id = commit_tree(self.store, blobs_2)
+        # Change tuple shape: ((old_path, new_path), (old_mode, new_mode),
+        # (old_sha, new_sha)).
+        change_a = (
+            (b"a", b"a"),
+            (0o100644, 0o100644),
+            (blob_a1.id, blob_a2.id),
+        )
+        self.assertEqual([change_a], list(self.store.tree_changes(tree1_id, tree2_id)))
+        self.assertEqual(
+            [
+                change_a,
+                ((b"b", b"b"), (0o100644, 0o100644), (blob_b.id, blob_b.id)),
+            ],
+            list(self.store.tree_changes(tree1_id, tree2_id, want_unchanged=True)),
+        )
+
+    def test_iter_tree_contents(self):
+        blob_a = make_object(Blob, data=b"a")
+        blob_b = make_object(Blob, data=b"b")
+        blob_c = make_object(Blob, data=b"c")
+        for blob in [blob_a, blob_b, blob_c]:
+            self.store.add_object(blob)
+
+        blobs = [
+            (b"a", blob_a.id, 0o100644),
+            (b"ad/b", blob_b.id, 0o100644),
+            (b"ad/bd/c", blob_c.id, 0o100755),
+            (b"ad/c", blob_c.id, 0o100644),
+            (b"c", blob_c.id, 0o100644),
+        ]
+        tree_id = commit_tree(self.store, blobs)
+        # Blobs are listed as (path, sha, mode); TreeEntry is (path, mode, sha).
+        self.assertEqual(
+            [TreeEntry(p, m, h) for (p, h, m) in blobs],
+            list(iter_tree_contents(self.store, tree_id)),
+        )
+        # A None tree yields nothing.
+        self.assertEqual([], list(iter_tree_contents(self.store, None)))
+
+    def test_iter_tree_contents_include_trees(self):
+        blob_a = make_object(Blob, data=b"a")
+        blob_b = make_object(Blob, data=b"b")
+        blob_c = make_object(Blob, data=b"c")
+        for blob in [blob_a, blob_b, blob_c]:
+            self.store.add_object(blob)
+
+        blobs = [
+            (b"a", blob_a.id, 0o100644),
+            (b"ad/b", blob_b.id, 0o100644),
+            (b"ad/bd/c", blob_c.id, 0o100755),
+        ]
+        tree_id = commit_tree(self.store, blobs)
+        tree = self.store[tree_id]
+        tree_ad = self.store[tree[b"ad"][1]]
+        tree_bd = self.store[tree_ad[b"bd"][1]]
+
+        # With include_trees=True the root and each subtree appear
+        # before their children, interleaved in path order.
+        expected = [
+            TreeEntry(b"", 0o040000, tree_id),
+            TreeEntry(b"a", 0o100644, blob_a.id),
+            TreeEntry(b"ad", 0o040000, tree_ad.id),
+            TreeEntry(b"ad/b", 0o100644, blob_b.id),
+            TreeEntry(b"ad/bd", 0o040000, tree_bd.id),
+            TreeEntry(b"ad/bd/c", 0o100755, blob_c.id),
+        ]
+        actual = iter_tree_contents(self.store, tree_id, include_trees=True)
+        self.assertEqual(expected, list(actual))
+
+    def make_tag(self, name, obj):
+        # Helper: create a tag object pointing at obj and store it.
+        tag = make_tag(obj, name=name)
+        self.store.add_object(tag)
+        return tag
+
+    def test_peel_sha(self):
+        self.store.add_object(testobject)
+        tag1 = self.make_tag(b"1", testobject)
+        tag2 = self.make_tag(b"2", testobject)
+        tag3 = self.make_tag(b"3", testobject)
+        # Peeling any of the tags (or the object itself) ends at testobject.
+        for obj in [testobject, tag1, tag2, tag3]:
+            self.assertEqual((obj, testobject), peel_sha(self.store, obj.id))
+
+    def test_get_raw(self):
+        self.store.add_object(testobject)
+        self.assertEqual(
+            (Blob.type_num, b"yummy data"), self.store.get_raw(testobject.id)
+        )
+
+    def test_close(self):
+        # For now, just check that close doesn't barf.
+        self.store.add_object(testobject)
+        self.store.close()
+
+
+class OverlayObjectStoreTests(ObjectStoreTests, TestCase):
+    """Runs the common store tests against an OverlayObjectStore
+    layered over two memory stores (writes go to the first)."""
+
+    def setUp(self):
+        TestCase.setUp(self)
+        self.bases = [MemoryObjectStore(), MemoryObjectStore()]
+        self.store = OverlayObjectStore(self.bases, self.bases[0])
+
+
+class MemoryObjectStoreTests(ObjectStoreTests, TestCase):
+    """Common store tests plus pack-ingestion tests for MemoryObjectStore."""
+
+    def setUp(self):
+        TestCase.setUp(self)
+        self.store = MemoryObjectStore()
+
+    def test_add_pack(self):
+        o = MemoryObjectStore()
+        # add_pack() returns a write file, a commit callback and an abort
+        # callback; exactly one of commit/abort must be called.
+        f, commit, abort = o.add_pack()
+        try:
+            b = make_object(Blob, data=b"more yummy data")
+            write_pack_objects(f.write, [(b, None)])
+        except BaseException:
+            abort()
+            raise
+        else:
+            commit()
+
+    def test_add_pack_emtpy(self):
+        # NOTE(review): "emtpy" is a typo for "empty" in the method name.
+        o = MemoryObjectStore()
+        f, commit, abort = o.add_pack()
+        commit()
+
+    def test_add_thin_pack(self):
+        o = MemoryObjectStore()
+        blob = make_object(Blob, data=b"yummy data")
+        o.add_object(blob)
+
+        # Build a thin pack whose REF_DELTA base (blob) lives only in the
+        # store, not in the pack itself.
+        f = BytesIO()
+        entries = build_pack(
+            f,
+            [
+                (REF_DELTA, (blob.id, b"more yummy data")),
+            ],
+            store=o,
+        )
+        o.add_thin_pack(f.read, None)
+        packed_blob_sha = sha_to_hex(entries[0][3])
+        self.assertEqual(
+            (Blob.type_num, b"more yummy data"), o.get_raw(packed_blob_sha)
+        )
+
+    def test_add_thin_pack_empty(self):
+        o = MemoryObjectStore()
+
+        f = BytesIO()
+        entries = build_pack(f, [], store=o)
+        self.assertEqual([], entries)
+        o.add_thin_pack(f.read, None)
+
+
+class PackBasedObjectStoreTests(ObjectStoreTests):
+    """Extra tests for stores that keep objects in packs (mixin;
+    subclasses combine it with TestCase and provide ``self.store``)."""
+
+    def tearDown(self):
+        # Release open pack file handles so temp dirs can be removed.
+        for pack in self.store.packs:
+            pack.close()
+
+    def test_empty_packs(self):
+        self.assertEqual([], list(self.store.packs))
+
+    def test_pack_loose_objects(self):
+        # Two loose objects ...
+        b1 = make_object(Blob, data=b"yummy data")
+        self.store.add_object(b1)
+        b2 = make_object(Blob, data=b"more yummy data")
+        self.store.add_object(b2)
+        # ... and two objects added via add_objects(), which packs them.
+        b3 = make_object(Blob, data=b"even more yummy data")
+        b4 = make_object(Blob, data=b"and more yummy data")
+        self.store.add_objects([(b3, None), (b4, None)])
+        self.assertEqual({b1.id, b2.id, b3.id, b4.id}, set(self.store))
+        self.assertEqual(1, len(self.store.packs))
+        # Only the two loose objects remain to be packed.
+        self.assertEqual(2, self.store.pack_loose_objects())
+        self.assertNotEqual([], list(self.store.packs))
+        # Second call finds nothing loose.
+        self.assertEqual(0, self.store.pack_loose_objects())
+
+    def test_repack(self):
+        b1 = make_object(Blob, data=b"yummy data")
+        self.store.add_object(b1)
+        b2 = make_object(Blob, data=b"more yummy data")
+        self.store.add_object(b2)
+        b3 = make_object(Blob, data=b"even more yummy data")
+        b4 = make_object(Blob, data=b"and more yummy data")
+        self.store.add_objects([(b3, None), (b4, None)])
+        b5 = make_object(Blob, data=b"and more data")
+        b6 = make_object(Blob, data=b"and some more data")
+        self.store.add_objects([(b5, None), (b6, None)])
+        self.assertEqual({b1.id, b2.id, b3.id, b4.id, b5.id, b6.id}, set(self.store))
+        self.assertEqual(2, len(self.store.packs))
+        # repack() consolidates 2 loose + 2 packs into a single pack of 6.
+        self.assertEqual(6, self.store.repack())
+        self.assertEqual(1, len(self.store.packs))
+        self.assertEqual(0, self.store.pack_loose_objects())
+
+    def test_repack_existing(self):
+        b1 = make_object(Blob, data=b"yummy data")
+        self.store.add_object(b1)
+        b2 = make_object(Blob, data=b"more yummy data")
+        self.store.add_object(b2)
+        # Objects duplicated between loose storage and two packs.
+        self.store.add_objects([(b1, None), (b2, None)])
+        self.store.add_objects([(b2, None)])
+        self.assertEqual({b1.id, b2.id}, set(self.store))
+        self.assertEqual(2, len(self.store.packs))
+        # Duplicates collapse to 2 distinct objects in one pack.
+        self.assertEqual(2, self.store.repack())
+        self.assertEqual(1, len(self.store.packs))
+        self.assertEqual(0, self.store.pack_loose_objects())
+
+        # Repacking an already-consolidated store is a no-op in effect.
+        self.assertEqual({b1.id, b2.id}, set(self.store))
+        self.assertEqual(1, len(self.store.packs))
+        self.assertEqual(2, self.store.repack())
+        self.assertEqual(1, len(self.store.packs))
+        self.assertEqual(0, self.store.pack_loose_objects())
+
+
+class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
+    """Pack-based store tests plus disk-specific behavior (alternates,
+    file modes, corruption handling) for DiskObjectStore."""
+
+    def setUp(self):
+        TestCase.setUp(self)
+        self.store_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, self.store_dir)
+        self.store = DiskObjectStore.init(self.store_dir)
+
+    def tearDown(self):
+        # NOTE(review): TestCase.tearDown before the pack-closing mixin
+        # tearDown looks inverted vs. setUp order — confirm intentional.
+        TestCase.tearDown(self)
+        PackBasedObjectStoreTests.tearDown(self)
+
+    def test_loose_compression_level(self):
+        # Just verify a custom loose_compression_level doesn't break writes.
+        alternate_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, alternate_dir)
+        alternate_store = DiskObjectStore(alternate_dir, loose_compression_level=6)
+        b2 = make_object(Blob, data=b"yummy data")
+        alternate_store.add_object(b2)
+
+    def test_alternates(self):
+        alternate_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, alternate_dir)
+        alternate_store = DiskObjectStore(alternate_dir)
+        b2 = make_object(Blob, data=b"yummy data")
+        alternate_store.add_object(b2)
+        store = DiskObjectStore(self.store_dir)
+        # Invisible before registering the alternate, visible after.
+        self.assertRaises(KeyError, store.__getitem__, b2.id)
+        store.add_alternate_path(alternate_dir)
+        self.assertIn(b2.id, store)
+        self.assertEqual(b2, store[b2.id])
+
+    def test_read_alternate_paths(self):
+        store = DiskObjectStore(self.store_dir)
+
+        abs_path = os.path.abspath(os.path.normpath("/abspath"))
+        # ensures in particular existence of the alternates file
+        store.add_alternate_path(abs_path)
+        self.assertEqual(set(store._read_alternate_paths()), {abs_path})
+
+        # Relative paths are resolved against the store path.
+        store.add_alternate_path("relative-path")
+        self.assertIn(
+            os.path.join(store.path, "relative-path"),
+            set(store._read_alternate_paths()),
+        )
+
+        # arguably, add_alternate_path() could strip comments.
+        # Meanwhile it's more convenient to use it than to import INFODIR
+        store.add_alternate_path("# comment")
+        for alt_path in store._read_alternate_paths():
+            self.assertNotIn("#", alt_path)
+
+    def test_file_modes(self):
+        # Loose object files are written read-only (0444), except on
+        # Windows where the mode is 0666.
+        self.store.add_object(testobject)
+        path = self.store._get_shafile_path(testobject.id)
+        mode = os.stat(path).st_mode
+
+        packmode = "0o100444" if sys.platform != "win32" else "0o100666"
+        self.assertEqual(oct(mode), packmode)
+
+    def test_corrupted_object_raise_exception(self):
+        """Corrupted sha1 disk file should raise specific exception."""
+        self.store.add_object(testobject)
+        self.assertEqual(
+            (Blob.type_num, b"yummy data"), self.store.get_raw(testobject.id)
+        )
+        self.assertTrue(self.store.contains_loose(testobject.id))
+        self.assertIsNotNone(self.store._get_loose_object(testobject.id))
+
+        # Truncate the loose object file to zero bytes (mode juggled
+        # because it is written read-only, see test_file_modes).
+        path = self.store._get_shafile_path(testobject.id)
+        old_mode = os.stat(path).st_mode
+        os.chmod(path, 0o600)
+        with open(path, "wb") as f:  # corrupt the file
+            f.write(b"")
+        os.chmod(path, old_mode)
+
+        # NOTE(review): these try/excepts pass silently if no exception is
+        # raised — assertRaises would be stricter; confirm intent upstream.
+        expected_error_msg = "Corrupted empty file detected"
+        try:
+            self.store.contains_loose(testobject.id)
+        except EmptyFileException as e:
+            self.assertEqual(str(e), expected_error_msg)
+
+        try:
+            self.store._get_loose_object(testobject.id)
+        except EmptyFileException as e:
+            self.assertEqual(str(e), expected_error_msg)
+
+        # this does not change iteration on loose objects though
+        self.assertEqual([testobject.id], list(self.store._iter_loose_objects()))
+
+    def test_tempfile_in_loose_store(self):
+        self.store.add_object(testobject)
+        self.assertEqual([testobject.id], list(self.store._iter_loose_objects()))
+
+        # add temporary files to the loose store
+        for i in range(256):
+            dirname = os.path.join(self.store_dir, "%02x" % i)
+            if not os.path.isdir(dirname):
+                os.makedirs(dirname)
+            fd, n = tempfile.mkstemp(prefix="tmp_obj_", dir=dirname)
+            os.close(fd)
+
+        # tmp_obj_* files must not be mistaken for loose objects.
+        self.assertEqual([testobject.id], list(self.store._iter_loose_objects()))
+
+    def test_add_alternate_path(self):
+        store = DiskObjectStore(self.store_dir)
+        self.assertEqual([], list(store._read_alternate_paths()))
+        store.add_alternate_path("/foo/path")
+        self.assertEqual(["/foo/path"], list(store._read_alternate_paths()))
+        store.add_alternate_path("/bar/path")
+        # Order of registration is preserved.
+        self.assertEqual(
+            ["/foo/path", "/bar/path"], list(store._read_alternate_paths())
+        )
+
+    def test_rel_alternative_path(self):
+        alternate_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, alternate_dir)
+        alternate_store = DiskObjectStore(alternate_dir)
+        b2 = make_object(Blob, data=b"yummy data")
+        alternate_store.add_object(b2)
+        store = DiskObjectStore(self.store_dir)
+        self.assertRaises(KeyError, store.__getitem__, b2.id)
+        # Register the alternate by relative path.
+        store.add_alternate_path(os.path.relpath(alternate_dir, self.store_dir))
+        self.assertEqual(list(alternate_store), list(store.alternates[0]))
+        self.assertIn(b2.id, store)
+        self.assertEqual(b2, store[b2.id])
+
+    def test_pack_dir(self):
+        o = DiskObjectStore(self.store_dir)
+        self.assertEqual(os.path.join(self.store_dir, "pack"), o.pack_dir)
+
+    def test_add_pack(self):
+        o = DiskObjectStore(self.store_dir)
+        self.addCleanup(o.close)
+        # Same commit/abort protocol as the MemoryObjectStore variant.
+        f, commit, abort = o.add_pack()
+        try:
+            b = make_object(Blob, data=b"more yummy data")
+            write_pack_objects(f.write, [(b, None)])
+        except BaseException:
+            abort()
+            raise
+        else:
+            commit()
+
+    def test_add_thin_pack(self):
+        o = DiskObjectStore(self.store_dir)
+        try:
+            blob = make_object(Blob, data=b"yummy data")
+            o.add_object(blob)
+
+            f = BytesIO()
+            entries = build_pack(
+                f,
+                [
+                    (REF_DELTA, (blob.id, b"more yummy data")),
+                ],
+                store=o,
+            )
+
+            # On disk, completing a thin pack also stores the delta base
+            # (blob) in the resulting pack.
+            with o.add_thin_pack(f.read, None) as pack:
+                packed_blob_sha = sha_to_hex(entries[0][3])
+                pack.check_length_and_checksum()
+                self.assertEqual(sorted([blob.id, packed_blob_sha]), list(pack))
+                self.assertTrue(o.contains_packed(packed_blob_sha))
+                self.assertTrue(o.contains_packed(blob.id))
+                self.assertEqual(
+                    (Blob.type_num, b"more yummy data"),
+                    o.get_raw(packed_blob_sha),
+                )
+        finally:
+            o.close()
+
+    def test_add_thin_pack_empty(self):
+        with closing(DiskObjectStore(self.store_dir)) as o:
+            f = BytesIO()
+            entries = build_pack(f, [], store=o)
+            self.assertEqual([], entries)
+            o.add_thin_pack(f.read, None)
+
+
+class TreeLookupPathTests(TestCase):
+    """Tests for tree_lookup_path() over a small nested tree that also
+    contains a gitlink (submodule) entry."""
+
+    def setUp(self):
+        TestCase.setUp(self)
+        self.store = MemoryObjectStore()
+        blob_a = make_object(Blob, data=b"a")
+        blob_b = make_object(Blob, data=b"b")
+        blob_c = make_object(Blob, data=b"c")
+        for blob in [blob_a, blob_b, blob_c]:
+            self.store.add_object(blob)
+
+        blobs = [
+            (b"a", blob_a.id, 0o100644),
+            (b"ad/b", blob_b.id, 0o100644),
+            (b"ad/bd/c", blob_c.id, 0o100755),
+            (b"ad/c", blob_c.id, 0o100644),
+            (b"c", blob_c.id, 0o100644),
+            (b"d", blob_c.id, S_IFGITLINK),  # submodule entry
+        ]
+        self.tree_id = commit_tree(self.store, blobs)
+
+    def get_object(self, sha):
+        # lookup callback passed to tree_lookup_path().
+        return self.store[sha]
+
+    def test_lookup_blob(self):
+        o_id = tree_lookup_path(self.get_object, self.tree_id, b"a")[1]
+        self.assertIsInstance(self.store[o_id], Blob)
+
+    def test_lookup_tree(self):
+        o_id = tree_lookup_path(self.get_object, self.tree_id, b"ad")[1]
+        self.assertIsInstance(self.store[o_id], Tree)
+        o_id = tree_lookup_path(self.get_object, self.tree_id, b"ad/bd")[1]
+        self.assertIsInstance(self.store[o_id], Tree)
+        # Trailing slash is accepted for tree paths.
+        o_id = tree_lookup_path(self.get_object, self.tree_id, b"ad/bd/")[1]
+        self.assertIsInstance(self.store[o_id], Tree)
+
+    def test_lookup_submodule(self):
+        # Looking up the gitlink itself works ...
+        tree_lookup_path(self.get_object, self.tree_id, b"d")[1]
+        # ... but descending into it raises SubmoduleEncountered.
+        self.assertRaises(
+            SubmoduleEncountered,
+            tree_lookup_path,
+            self.get_object,
+            self.tree_id,
+            b"d/a",
+        )
+
+    def test_lookup_nonexistent(self):
+        self.assertRaises(
+            KeyError, tree_lookup_path, self.get_object, self.tree_id, b"j"
+        )
+
+    def test_lookup_not_tree(self):
+        # Descending through a blob ("ad/b") raises NotTreeError.
+        self.assertRaises(
+            NotTreeError,
+            tree_lookup_path,
+            self.get_object,
+            self.tree_id,
+            b"ad/b/j",
+        )
+
+
+class ObjectStoreGraphWalkerTests(TestCase):
+    """Tests for ObjectStoreGraphWalker using synthetic 40-char shas."""
+
+    def get_walker(self, heads, parent_map):
+        # Expand single-byte keys (b"a") into 40-char fake shas (b"a"*40)
+        # so the walker sees well-formed hex-sized ids.
+        new_parent_map = {
+            k * 40: [(p * 40) for p in ps] for (k, ps) in parent_map.items()
+        }
+        return ObjectStoreGraphWalker(
+            [x * 40 for x in heads], new_parent_map.__getitem__
+        )
+
+    def test_ack_invalid_value(self):
+        gw = self.get_walker([], {})
+        # ack() requires a full-length sha.
+        self.assertRaises(ValueError, gw.ack, "tooshort")
+
+    def test_empty(self):
+        gw = self.get_walker([], {})
+        self.assertIs(None, next(gw))
+        gw.ack(b"a" * 40)
+        self.assertIs(None, next(gw))
+
+    def test_descends(self):
+        gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": []})
+        self.assertEqual(b"a" * 40, next(gw))
+        self.assertEqual(b"b" * 40, next(gw))
+
+    def test_present(self):
+        # Acking the head before walking stops the walk immediately.
+        gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": []})
+        gw.ack(b"a" * 40)
+        self.assertIs(None, next(gw))
+
+    def test_parent_present(self):
+        gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": []})
+        self.assertEqual(b"a" * 40, next(gw))
+        gw.ack(b"a" * 40)
+        self.assertIs(None, next(gw))
+
+    def test_child_ack_later(self):
+        gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": [b"c"], b"c": []})
+        self.assertEqual(b"a" * 40, next(gw))
+        self.assertEqual(b"b" * 40, next(gw))
+        # Acking an ancestor cuts off the rest of the chain (c).
+        gw.ack(b"a" * 40)
+        self.assertIs(None, next(gw))
+
+    def test_only_once(self):
+        # a  b
+        # |  |
+        # c  d
+        #  \ /
+        #   e
+        gw = self.get_walker(
+            [b"a", b"b"],
+            {
+                b"a": [b"c"],
+                b"b": [b"d"],
+                b"c": [b"e"],
+                b"d": [b"e"],
+                b"e": [],
+            },
+        )
+        walk = []
+        acked = False
+        walk.append(next(gw))
+        walk.append(next(gw))
+        # A branch (a, c) or (b, d) may be done after 2 steps or 3 depending on
+        # the order walked: 3-step walks include (a, b, c) and (b, a, d), etc.
+        if walk == [b"a" * 40, b"c" * 40] or walk == [b"b" * 40, b"d" * 40]:
+            gw.ack(walk[0])
+            acked = True
+
+        walk.append(next(gw))
+        if not acked and walk[2] == b"c" * 40:
+            gw.ack(b"a" * 40)
+        elif not acked and walk[2] == b"d" * 40:
+            gw.ack(b"b" * 40)
+        walk.append(next(gw))
+        self.assertIs(None, next(gw))
+
+        # e must never be yielded (common ancestor of both acked lines),
+        # and each branch is walked top-down exactly once.
+        self.assertEqual([b"a" * 40, b"b" * 40, b"c" * 40, b"d" * 40], sorted(walk))
+        self.assertLess(walk.index(b"a" * 40), walk.index(b"c" * 40))
+        self.assertLess(walk.index(b"b" * 40), walk.index(b"d" * 40))
+
+
+class CommitTreeChangesTests(TestCase):
+    """Tests for commit_tree_changes(): applying (path, mode, sha)
+    changes to an existing tree, including adds, nested adds and deletes."""
+
+    def setUp(self):
+        super().setUp()
+        self.store = MemoryObjectStore()
+        self.blob_a = make_object(Blob, data=b"a")
+        self.blob_b = make_object(Blob, data=b"b")
+        self.blob_c = make_object(Blob, data=b"c")
+        for blob in [self.blob_a, self.blob_b, self.blob_c]:
+            self.store.add_object(blob)
+
+        blobs = [
+            (b"a", self.blob_a.id, 0o100644),
+            (b"ad/b", self.blob_b.id, 0o100644),
+            (b"ad/bd/c", self.blob_c.id, 0o100755),
+            (b"ad/c", self.blob_c.id, 0o100644),
+            (b"c", self.blob_c.id, 0o100644),
+        ]
+        self.tree_id = commit_tree(self.store, blobs)
+
+    def test_no_changes(self):
+        # An empty change list returns an equal tree.
+        self.assertEqual(
+            self.store[self.tree_id],
+            commit_tree_changes(self.store, self.store[self.tree_id], []),
+        )
+
+    def test_add_blob(self):
+        blob_d = make_object(Blob, data=b"d")
+        new_tree = commit_tree_changes(
+            self.store, self.store[self.tree_id], [(b"d", 0o100644, blob_d.id)]
+        )
+        # 33188 == 0o100644; sha is the fixed id of blob b"d".
+        self.assertEqual(
+            new_tree[b"d"],
+            (33188, b"c59d9b6344f1af00e504ba698129f07a34bbed8d"),
+        )
+
+    def test_add_blob_in_dir(self):
+        # Adding a deep path creates the intermediate trees e/ and e/f/.
+        blob_d = make_object(Blob, data=b"d")
+        new_tree = commit_tree_changes(
+            self.store,
+            self.store[self.tree_id],
+            [(b"e/f/d", 0o100644, blob_d.id)],
+        )
+        self.assertEqual(
+            new_tree.items(),
+            [
+                TreeEntry(path=b"a", mode=stat.S_IFREG | 0o100644, sha=self.blob_a.id),
+                TreeEntry(
+                    path=b"ad",
+                    mode=stat.S_IFDIR,
+                    sha=b"0e2ce2cd7725ff4817791be31ccd6e627e801f4a",
+                ),
+                TreeEntry(path=b"c", mode=stat.S_IFREG | 0o100644, sha=self.blob_c.id),
+                TreeEntry(
+                    path=b"e",
+                    mode=stat.S_IFDIR,
+                    sha=b"6ab344e288724ac2fb38704728b8896e367ed108",
+                ),
+            ],
+        )
+        e_tree = self.store[new_tree[b"e"][1]]
+        self.assertEqual(
+            e_tree.items(),
+            [
+                TreeEntry(
+                    path=b"f",
+                    mode=stat.S_IFDIR,
+                    sha=b"24d2c94d8af232b15a0978c006bf61ef4479a0a5",
+                )
+            ],
+        )
+        f_tree = self.store[e_tree[b"f"][1]]
+        self.assertEqual(
+            f_tree.items(),
+            [TreeEntry(path=b"d", mode=stat.S_IFREG | 0o100644, sha=blob_d.id)],
+        )
+
+    def test_delete_blob(self):
+        # Deleting ad/bd/c removes the now-empty ad/bd subtree as well.
+        new_tree = commit_tree_changes(
+            self.store, self.store[self.tree_id], [(b"ad/bd/c", None, None)]
+        )
+        self.assertEqual(set(new_tree), {b"a", b"ad", b"c"})
+        ad_tree = self.store[new_tree[b"ad"][1]]
+        self.assertEqual(set(ad_tree), {b"b", b"c"})
+
+
+class TestReadPacksFile(TestCase):
+    """Tests for read_packs_file() parsing an objects/info/packs file."""
+
+    def test_read_packs(self):
+        # "P <name>" lines yield the pack file names.
+        self.assertEqual(
+            ["pack-1.pack"],
+            list(
+                read_packs_file(
+                    BytesIO(
+                        b"""P pack-1.pack
+"""
+                    )
+                )
+            ),
+        )
blob - /dev/null
blob + 9c4874bf5f054d0c4232b6c25748f973daed45c6 (mode 644)
--- /dev/null
+++ tests/test_objects.py
+# test_objects.py -- tests for objects.py
+# Copyright (C) 2007 James Westby <jw+debian@jameswestby.net>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for git base objects."""
+
+# TODO: Round-trip parse-serialize-parse and serialize-parse-serialize tests.
+
+import datetime
+import os
+import stat
+from contextlib import contextmanager
+from io import BytesIO
+from itertools import permutations
+
+from dulwich.errors import ObjectFormatException
+from dulwich.objects import (
+ MAX_TIME,
+ Blob,
+ Commit,
+ ShaFile,
+ Tag,
+ Tree,
+ TreeEntry,
+ _parse_tree_py,
+ _sorted_tree_items_py,
+ check_hexsha,
+ check_identity,
+ format_timezone,
+ hex_to_filename,
+ hex_to_sha,
+ object_class,
+ parse_timezone,
+ parse_tree,
+ pretty_format_tree_entry,
+ sha_to_hex,
+ sorted_tree_items,
+)
+
+from . import TestCase
+from .utils import ext_functest_builder, functest_builder, make_commit, make_object
+
+a_sha = b"6f670c0fb53f9463760b7295fbb814e965fb20c8"
+b_sha = b"2969be3e8ee1c0222396a5611407e4769f14e54b"
+c_sha = b"954a536f7819d40e6f637f849ee187dd10066349"
+tree_sha = b"70c190eb48fa8bbb50ddc692a17b44cb781af7f6"
+tag_sha = b"71033db03a03c6a36721efcf1968dd8f8e0cf023"
+
+
+class TestHexToSha(TestCase):
+ def test_simple(self):
+ self.assertEqual(b"\xab\xcd" * 10, hex_to_sha(b"abcd" * 10))
+
+ def test_reverse(self):
+ self.assertEqual(b"abcd" * 10, sha_to_hex(b"\xab\xcd" * 10))
+
+
+class BlobReadTests(TestCase):
+ """Test decompression of blobs."""
+
+ def get_sha_file(self, cls, base, sha):
+ dir = os.path.join(os.path.dirname(__file__), "..", "testdata", base)
+ return cls.from_path(hex_to_filename(dir, sha))
+
+ def get_blob(self, sha):
+ """Return the blob named sha from the test data dir."""
+ return self.get_sha_file(Blob, "blobs", sha)
+
+ def get_tree(self, sha):
+ return self.get_sha_file(Tree, "trees", sha)
+
+ def get_tag(self, sha):
+ return self.get_sha_file(Tag, "tags", sha)
+
+ def commit(self, sha):
+ return self.get_sha_file(Commit, "commits", sha)
+
+ def test_decompress_simple_blob(self):
+ b = self.get_blob(a_sha)
+ self.assertEqual(b.data, b"test 1\n")
+ self.assertEqual(b.sha().hexdigest().encode("ascii"), a_sha)
+
+ def test_hash(self):
+ b = self.get_blob(a_sha)
+ self.assertEqual(hash(b.id), hash(b))
+
+ def test_parse_empty_blob_object(self):
+ sha = b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"
+ b = self.get_blob(sha)
+ self.assertEqual(b.data, b"")
+ self.assertEqual(b.id, sha)
+ self.assertEqual(b.sha().hexdigest().encode("ascii"), sha)
+
+ def test_create_blob_from_string(self):
+ string = b"test 2\n"
+ b = Blob.from_string(string)
+ self.assertEqual(b.data, string)
+ self.assertEqual(b.sha().hexdigest().encode("ascii"), b_sha)
+
+ def test_legacy_from_file(self):
+ b1 = Blob.from_string(b"foo")
+ b_raw = b1.as_legacy_object()
+ b2 = b1.from_file(BytesIO(b_raw))
+ self.assertEqual(b1, b2)
+
+ def test_legacy_from_file_compression_level(self):
+ b1 = Blob.from_string(b"foo")
+ b_raw = b1.as_legacy_object(compression_level=6)
+ b2 = b1.from_file(BytesIO(b_raw))
+ self.assertEqual(b1, b2)
+
+ def test_chunks(self):
+ string = b"test 5\n"
+ b = Blob.from_string(string)
+ self.assertEqual([string], b.chunked)
+
+ def test_splitlines(self):
+ for case in [
+ [],
+ [b"foo\nbar\n"],
+ [b"bl\na", b"blie"],
+ [b"bl\na", b"blie", b"bloe\n"],
+ [b"", b"bl\na", b"blie", b"bloe\n"],
+ [b"", b"", b"", b"bla\n"],
+ [b"", b"", b"", b"bla\n", b""],
+ [b"bl", b"", b"a\naaa"],
+ [b"a\naaa", b"a"],
+ ]:
+ b = Blob()
+ b.chunked = case
+ self.assertEqual(b.data.splitlines(True), b.splitlines())
+
+ def test_set_chunks(self):
+ b = Blob()
+ b.chunked = [b"te", b"st", b" 5\n"]
+ self.assertEqual(b"test 5\n", b.data)
+ b.chunked = [b"te", b"st", b" 6\n"]
+ self.assertEqual(b"test 6\n", b.as_raw_string())
+ self.assertEqual(b"test 6\n", bytes(b))
+
+ def test_parse_legacy_blob(self):
+ string = b"test 3\n"
+ b = self.get_blob(c_sha)
+ self.assertEqual(b.data, string)
+ self.assertEqual(b.sha().hexdigest().encode("ascii"), c_sha)
+
+ def test_eq(self):
+ blob1 = self.get_blob(a_sha)
+ blob2 = self.get_blob(a_sha)
+ self.assertEqual(blob1, blob2)
+
+ def test_read_tree_from_file(self):
+ t = self.get_tree(tree_sha)
+ self.assertEqual(t.items()[0], (b"a", 33188, a_sha))
+ self.assertEqual(t.items()[1], (b"b", 33188, b_sha))
+
+ def test_read_tree_from_file_parse_count(self):
+ old_deserialize = Tree._deserialize
+
+ def reset_deserialize():
+ Tree._deserialize = old_deserialize
+
+ self.addCleanup(reset_deserialize)
+ self.deserialize_count = 0
+
+ def counting_deserialize(*args, **kwargs):
+ self.deserialize_count += 1
+ return old_deserialize(*args, **kwargs)
+
+ Tree._deserialize = counting_deserialize
+ t = self.get_tree(tree_sha)
+ self.assertEqual(t.items()[0], (b"a", 33188, a_sha))
+ self.assertEqual(t.items()[1], (b"b", 33188, b_sha))
+ self.assertEqual(self.deserialize_count, 1)
+
+ def test_read_tag_from_file(self):
+ t = self.get_tag(tag_sha)
+ self.assertEqual(
+ t.object, (Commit, b"51b668fd5bf7061b7d6fa525f88803e6cfadaa51")
+ )
+ self.assertEqual(t.name, b"signed")
+ self.assertEqual(t.tagger, b"Ali Sabil <ali.sabil@gmail.com>")
+ self.assertEqual(t.tag_time, 1231203091)
+ self.assertEqual(t.message, b"This is a signed tag\n")
+ self.assertEqual(
+ t.signature,
+ b"-----BEGIN PGP SIGNATURE-----\n"
+ b"Version: GnuPG v1.4.9 (GNU/Linux)\n"
+ b"\n"
+ b"iEYEABECAAYFAkliqx8ACgkQqSMmLy9u/"
+ b"kcx5ACfakZ9NnPl02tOyYP6pkBoEkU1\n"
+ b"5EcAn0UFgokaSvS371Ym/4W9iJj6vh3h\n"
+ b"=ql7y\n"
+ b"-----END PGP SIGNATURE-----\n",
+ )
+
+ def test_read_commit_from_file(self):
+ sha = b"60dacdc733de308bb77bb76ce0fb0f9b44c9769e"
+ c = self.commit(sha)
+ self.assertEqual(c.tree, tree_sha)
+ self.assertEqual(c.parents, [b"0d89f20333fbb1d2f3a94da77f4981373d8f4310"])
+ self.assertEqual(c.author, b"James Westby <jw+debian@jameswestby.net>")
+ self.assertEqual(c.committer, b"James Westby <jw+debian@jameswestby.net>")
+ self.assertEqual(c.commit_time, 1174759230)
+ self.assertEqual(c.commit_timezone, 0)
+ self.assertEqual(c.author_timezone, 0)
+ self.assertEqual(c.message, b"Test commit\n")
+
+ def test_read_commit_no_parents(self):
+ sha = b"0d89f20333fbb1d2f3a94da77f4981373d8f4310"
+ c = self.commit(sha)
+ self.assertEqual(c.tree, b"90182552c4a85a45ec2a835cadc3451bebdfe870")
+ self.assertEqual(c.parents, [])
+ self.assertEqual(c.author, b"James Westby <jw+debian@jameswestby.net>")
+ self.assertEqual(c.committer, b"James Westby <jw+debian@jameswestby.net>")
+ self.assertEqual(c.commit_time, 1174758034)
+ self.assertEqual(c.commit_timezone, 0)
+ self.assertEqual(c.author_timezone, 0)
+ self.assertEqual(c.message, b"Test commit\n")
+
+ def test_read_commit_two_parents(self):
+ sha = b"5dac377bdded4c9aeb8dff595f0faeebcc8498cc"
+ c = self.commit(sha)
+ self.assertEqual(c.tree, b"d80c186a03f423a81b39df39dc87fd269736ca86")
+ self.assertEqual(
+ c.parents,
+ [
+ b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd",
+ b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6",
+ ],
+ )
+ self.assertEqual(c.author, b"James Westby <jw+debian@jameswestby.net>")
+ self.assertEqual(c.committer, b"James Westby <jw+debian@jameswestby.net>")
+ self.assertEqual(c.commit_time, 1174773719)
+ self.assertEqual(c.commit_timezone, 0)
+ self.assertEqual(c.author_timezone, 0)
+ self.assertEqual(c.message, b"Merge ../b\n")
+
+ def test_stub_sha(self):
+ sha = b"5" * 40
+ c = make_commit(id=sha, message=b"foo")
+ self.assertIsInstance(c, Commit)
+ self.assertEqual(sha, c.id)
+ self.assertNotEqual(sha, c.sha())
+
+
+class ShaFileCheckTests(TestCase):
+ def assertCheckFails(self, cls, data):
+ obj = cls()
+
+ def do_check():
+ obj.set_raw_string(data)
+ obj.check()
+
+ self.assertRaises(ObjectFormatException, do_check)
+
+ def assertCheckSucceeds(self, cls, data):
+ obj = cls()
+ obj.set_raw_string(data)
+ self.assertEqual(None, obj.check())
+
+
+small_buffer_zlib_object = (
+ b"\x48\x89\x15\xcc\x31\x0e\xc2\x30\x0c\x40\x51\xe6"
+ b"\x9c\xc2\x3b\xaa\x64\x37\xc4\xc1\x12\x42\x5c\xc5"
+ b"\x49\xac\x52\xd4\x92\xaa\x78\xe1\xf6\x94\xed\xeb"
+ b"\x0d\xdf\x75\x02\xa2\x7c\xea\xe5\x65\xd5\x81\x8b"
+ b"\x9a\x61\xba\xa0\xa9\x08\x36\xc9\x4c\x1a\xad\x88"
+ b"\x16\xba\x46\xc4\xa8\x99\x6a\x64\xe1\xe0\xdf\xcd"
+ b"\xa0\xf6\x75\x9d\x3d\xf8\xf1\xd0\x77\xdb\xfb\xdc"
+ b"\x86\xa3\x87\xf1\x2f\x93\xed\x00\xb7\xc7\xd2\xab"
+ b"\x2e\xcf\xfe\xf1\x3b\x50\xa4\x91\x53\x12\x24\x38"
+ b"\x23\x21\x86\xf0\x03\x2f\x91\x24\x52"
+)
+
+
+class ShaFileTests(TestCase):
+ def test_deflated_smaller_window_buffer(self):
+ # zlib on some systems uses smaller buffers,
+ # resulting in a different header.
+ # See https://github.com/libgit2/libgit2/pull/464
+ sf = ShaFile.from_file(BytesIO(small_buffer_zlib_object))
+ self.assertEqual(sf.type_name, b"tag")
+ self.assertEqual(sf.tagger, b" <@localhost>")
+
+
+class CommitSerializationTests(TestCase):
+ def make_commit(self, **kwargs):
+ attrs = {
+ "tree": b"d80c186a03f423a81b39df39dc87fd269736ca86",
+ "parents": [
+ b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd",
+ b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6",
+ ],
+ "author": b"James Westby <jw+debian@jameswestby.net>",
+ "committer": b"James Westby <jw+debian@jameswestby.net>",
+ "commit_time": 1174773719,
+ "author_time": 1174773719,
+ "commit_timezone": 0,
+ "author_timezone": 0,
+ "message": b"Merge ../b\n",
+ }
+ attrs.update(kwargs)
+ return make_commit(**attrs)
+
+ def test_encoding(self):
+ c = self.make_commit(encoding=b"iso8859-1")
+ self.assertIn(b"encoding iso8859-1\n", c.as_raw_string())
+
+ def test_short_timestamp(self):
+ c = self.make_commit(commit_time=30)
+ c1 = Commit()
+ c1.set_raw_string(c.as_raw_string())
+ self.assertEqual(30, c1.commit_time)
+
+ def test_full_tree(self):
+ c = self.make_commit(commit_time=30)
+ t = Tree()
+ t.add(b"data-x", 0o644, Blob().id)
+ c.tree = t
+ c1 = Commit()
+ c1.set_raw_string(c.as_raw_string())
+ self.assertEqual(t.id, c1.tree)
+ self.assertEqual(c.as_raw_string(), c1.as_raw_string())
+
+ def test_raw_length(self):
+ c = self.make_commit()
+ self.assertEqual(len(c.as_raw_string()), c.raw_length())
+
+ def test_simple(self):
+ c = self.make_commit()
+ self.assertEqual(c.id, b"5dac377bdded4c9aeb8dff595f0faeebcc8498cc")
+ self.assertEqual(
+ b"tree d80c186a03f423a81b39df39dc87fd269736ca86\n"
+ b"parent ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd\n"
+ b"parent 4cffe90e0a41ad3f5190079d7c8f036bde29cbe6\n"
+ b"author James Westby <jw+debian@jameswestby.net> "
+ b"1174773719 +0000\n"
+ b"committer James Westby <jw+debian@jameswestby.net> "
+ b"1174773719 +0000\n"
+ b"\n"
+ b"Merge ../b\n",
+ c.as_raw_string(),
+ )
+
+ def test_timezone(self):
+ c = self.make_commit(commit_timezone=(5 * 60))
+ self.assertIn(b" +0005\n", c.as_raw_string())
+
+ def test_neg_timezone(self):
+ c = self.make_commit(commit_timezone=(-1 * 3600))
+ self.assertIn(b" -0100\n", c.as_raw_string())
+
+ def test_deserialize(self):
+ c = self.make_commit()
+ d = Commit()
+ d._deserialize(c.as_raw_chunks())
+ self.assertEqual(c, d)
+
+ def test_serialize_gpgsig(self):
+ commit = self.make_commit(
+ gpgsig=b"""-----BEGIN PGP SIGNATURE-----
+Version: GnuPG v1
+
+iQIcBAABCgAGBQJULCdfAAoJEACAbyvXKaRXuKwP/RyP9PA49uAvu8tQVCC/uBa8
+vi975+xvO14R8Pp8k2nps7lSxCdtCd+xVT1VRHs0wNhOZo2YCVoU1HATkPejqSeV
+NScTHcxnk4/+bxyfk14xvJkNp7FlQ3npmBkA+lbV0Ubr33rvtIE5jiJPyz+SgWAg
+xdBG2TojV0squj00GoH/euK6aX7GgZtwdtpTv44haCQdSuPGDcI4TORqR6YSqvy3
+GPE+3ZqXPFFb+KILtimkxitdwB7CpwmNse2vE3rONSwTvi8nq3ZoQYNY73CQGkUy
+qoFU0pDtw87U3niFin1ZccDgH0bB6624sLViqrjcbYJeg815Htsu4rmzVaZADEVC
+XhIO4MThebusdk0AcNGjgpf3HRHk0DPMDDlIjm+Oao0cqovvF6VyYmcb0C+RmhJj
+dodLXMNmbqErwTk3zEkW0yZvNIYXH7m9SokPCZa4eeIM7be62X6h1mbt0/IU6Th+
+v18fS0iTMP/Viug5und+05C/v04kgDo0CPphAbXwWMnkE4B6Tl9sdyUYXtvQsL7x
+0+WP1gL27ANqNZiI07Kz/BhbBAQI/+2TFT7oGr0AnFPQ5jHp+3GpUf6OKuT1wT3H
+ND189UFuRuubxb42vZhpcXRbqJVWnbECTKVUPsGZqat3enQUB63uM4i6/RdONDZA
+fDeF1m4qYs+cUXKNUZ03
+=X6RT
+-----END PGP SIGNATURE-----"""
+ )
+ self.maxDiff = None
+ self.assertEqual(
+ b"""\
+tree d80c186a03f423a81b39df39dc87fd269736ca86
+parent ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd
+parent 4cffe90e0a41ad3f5190079d7c8f036bde29cbe6
+author James Westby <jw+debian@jameswestby.net> 1174773719 +0000
+committer James Westby <jw+debian@jameswestby.net> 1174773719 +0000
+gpgsig -----BEGIN PGP SIGNATURE-----
+ Version: GnuPG v1
+
+ iQIcBAABCgAGBQJULCdfAAoJEACAbyvXKaRXuKwP/RyP9PA49uAvu8tQVCC/uBa8
+ vi975+xvO14R8Pp8k2nps7lSxCdtCd+xVT1VRHs0wNhOZo2YCVoU1HATkPejqSeV
+ NScTHcxnk4/+bxyfk14xvJkNp7FlQ3npmBkA+lbV0Ubr33rvtIE5jiJPyz+SgWAg
+ xdBG2TojV0squj00GoH/euK6aX7GgZtwdtpTv44haCQdSuPGDcI4TORqR6YSqvy3
+ GPE+3ZqXPFFb+KILtimkxitdwB7CpwmNse2vE3rONSwTvi8nq3ZoQYNY73CQGkUy
+ qoFU0pDtw87U3niFin1ZccDgH0bB6624sLViqrjcbYJeg815Htsu4rmzVaZADEVC
+ XhIO4MThebusdk0AcNGjgpf3HRHk0DPMDDlIjm+Oao0cqovvF6VyYmcb0C+RmhJj
+ dodLXMNmbqErwTk3zEkW0yZvNIYXH7m9SokPCZa4eeIM7be62X6h1mbt0/IU6Th+
+ v18fS0iTMP/Viug5und+05C/v04kgDo0CPphAbXwWMnkE4B6Tl9sdyUYXtvQsL7x
+ 0+WP1gL27ANqNZiI07Kz/BhbBAQI/+2TFT7oGr0AnFPQ5jHp+3GpUf6OKuT1wT3H
+ ND189UFuRuubxb42vZhpcXRbqJVWnbECTKVUPsGZqat3enQUB63uM4i6/RdONDZA
+ fDeF1m4qYs+cUXKNUZ03
+ =X6RT
+ -----END PGP SIGNATURE-----
+
+Merge ../b
+""",
+ commit.as_raw_string(),
+ )
+
+ def test_serialize_mergetag(self):
+ tag = make_object(
+ Tag,
+ object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"),
+ object_type_name=b"commit",
+ name=b"v2.6.22-rc7",
+ tag_time=1183319674,
+ tag_timezone=0,
+ tagger=b"Linus Torvalds <torvalds@woody.linux-foundation.org>",
+ message=default_message,
+ )
+ commit = self.make_commit(mergetag=[tag])
+
+ self.assertEqual(
+ b"""tree d80c186a03f423a81b39df39dc87fd269736ca86
+parent ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd
+parent 4cffe90e0a41ad3f5190079d7c8f036bde29cbe6
+author James Westby <jw+debian@jameswestby.net> 1174773719 +0000
+committer James Westby <jw+debian@jameswestby.net> 1174773719 +0000
+mergetag object a38d6181ff27824c79fc7df825164a212eff6a3f
+ type commit
+ tag v2.6.22-rc7
+ tagger Linus Torvalds <torvalds@woody.linux-foundation.org> 1183319674 +0000
+
+ Linux 2.6.22-rc7
+ -----BEGIN PGP SIGNATURE-----
+ Version: GnuPG v1.4.7 (GNU/Linux)
+
+ iD8DBQBGiAaAF3YsRnbiHLsRAitMAKCiLboJkQECM/jpYsY3WPfvUgLXkACgg3ql
+ OK2XeQOiEeXtT76rV4t2WR4=
+ =ivrA
+ -----END PGP SIGNATURE-----
+
+Merge ../b
+""",
+ commit.as_raw_string(),
+ )
+
+ def test_serialize_mergetags(self):
+ tag = make_object(
+ Tag,
+ object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"),
+ object_type_name=b"commit",
+ name=b"v2.6.22-rc7",
+ tag_time=1183319674,
+ tag_timezone=0,
+ tagger=b"Linus Torvalds <torvalds@woody.linux-foundation.org>",
+ message=default_message,
+ )
+ commit = self.make_commit(mergetag=[tag, tag])
+
+ self.assertEqual(
+ b"""tree d80c186a03f423a81b39df39dc87fd269736ca86
+parent ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd
+parent 4cffe90e0a41ad3f5190079d7c8f036bde29cbe6
+author James Westby <jw+debian@jameswestby.net> 1174773719 +0000
+committer James Westby <jw+debian@jameswestby.net> 1174773719 +0000
+mergetag object a38d6181ff27824c79fc7df825164a212eff6a3f
+ type commit
+ tag v2.6.22-rc7
+ tagger Linus Torvalds <torvalds@woody.linux-foundation.org> 1183319674 +0000
+
+ Linux 2.6.22-rc7
+ -----BEGIN PGP SIGNATURE-----
+ Version: GnuPG v1.4.7 (GNU/Linux)
+
+ iD8DBQBGiAaAF3YsRnbiHLsRAitMAKCiLboJkQECM/jpYsY3WPfvUgLXkACgg3ql
+ OK2XeQOiEeXtT76rV4t2WR4=
+ =ivrA
+ -----END PGP SIGNATURE-----
+mergetag object a38d6181ff27824c79fc7df825164a212eff6a3f
+ type commit
+ tag v2.6.22-rc7
+ tagger Linus Torvalds <torvalds@woody.linux-foundation.org> 1183319674 +0000
+
+ Linux 2.6.22-rc7
+ -----BEGIN PGP SIGNATURE-----
+ Version: GnuPG v1.4.7 (GNU/Linux)
+
+ iD8DBQBGiAaAF3YsRnbiHLsRAitMAKCiLboJkQECM/jpYsY3WPfvUgLXkACgg3ql
+ OK2XeQOiEeXtT76rV4t2WR4=
+ =ivrA
+ -----END PGP SIGNATURE-----
+
+Merge ../b
+""",
+ commit.as_raw_string(),
+ )
+
+ def test_deserialize_mergetag(self):
+ tag = make_object(
+ Tag,
+ object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"),
+ object_type_name=b"commit",
+ name=b"v2.6.22-rc7",
+ tag_time=1183319674,
+ tag_timezone=0,
+ tagger=b"Linus Torvalds <torvalds@woody.linux-foundation.org>",
+ message=default_message,
+ )
+ commit = self.make_commit(mergetag=[tag])
+
+ d = Commit()
+ d._deserialize(commit.as_raw_chunks())
+ self.assertEqual(commit, d)
+
+ def test_deserialize_mergetags(self):
+ tag = make_object(
+ Tag,
+ object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"),
+ object_type_name=b"commit",
+ name=b"v2.6.22-rc7",
+ tag_time=1183319674,
+ tag_timezone=0,
+ tagger=b"Linus Torvalds <torvalds@woody.linux-foundation.org>",
+ message=default_message,
+ )
+ commit = self.make_commit(mergetag=[tag, tag])
+
+ d = Commit()
+ d._deserialize(commit.as_raw_chunks())
+ self.assertEqual(commit, d)
+
+
+default_committer = b"James Westby <jw+debian@jameswestby.net> 1174773719 +0000"
+
+
+class CommitParseTests(ShaFileCheckTests):
+ def make_commit_lines(
+ self,
+ tree=b"d80c186a03f423a81b39df39dc87fd269736ca86",
+ parents=[
+ b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd",
+ b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6",
+ ],
+ author=default_committer,
+ committer=default_committer,
+ encoding=None,
+ message=b"Merge ../b\n",
+ extra=None,
+ ):
+ lines = []
+ if tree is not None:
+ lines.append(b"tree " + tree)
+ if parents is not None:
+ lines.extend(b"parent " + p for p in parents)
+ if author is not None:
+ lines.append(b"author " + author)
+ if committer is not None:
+ lines.append(b"committer " + committer)
+ if encoding is not None:
+ lines.append(b"encoding " + encoding)
+ if extra is not None:
+ for name, value in sorted(extra.items()):
+ lines.append(name + b" " + value)
+ lines.append(b"")
+ if message is not None:
+ lines.append(message)
+ return lines
+
+ def make_commit_text(self, **kwargs):
+ return b"\n".join(self.make_commit_lines(**kwargs))
+
+ def test_simple(self):
+ c = Commit.from_string(self.make_commit_text())
+ self.assertEqual(b"Merge ../b\n", c.message)
+ self.assertEqual(b"James Westby <jw+debian@jameswestby.net>", c.author)
+ self.assertEqual(b"James Westby <jw+debian@jameswestby.net>", c.committer)
+ self.assertEqual(b"d80c186a03f423a81b39df39dc87fd269736ca86", c.tree)
+ self.assertEqual(
+ [
+ b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd",
+ b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6",
+ ],
+ c.parents,
+ )
+ expected_time = datetime.datetime(2007, 3, 24, 22, 1, 59)
+ self.assertEqual(
+ expected_time, datetime.datetime.utcfromtimestamp(c.commit_time)
+ )
+ self.assertEqual(0, c.commit_timezone)
+ self.assertEqual(
+ expected_time, datetime.datetime.utcfromtimestamp(c.author_time)
+ )
+ self.assertEqual(0, c.author_timezone)
+ self.assertEqual(None, c.encoding)
+
+ def test_custom(self):
+ c = Commit.from_string(self.make_commit_text(extra={b"extra-field": b"data"}))
+ self.assertEqual([(b"extra-field", b"data")], c._extra)
+
+ def test_encoding(self):
+ c = Commit.from_string(self.make_commit_text(encoding=b"UTF-8"))
+ self.assertEqual(b"UTF-8", c.encoding)
+
+ def test_check(self):
+ self.assertCheckSucceeds(Commit, self.make_commit_text())
+ self.assertCheckSucceeds(Commit, self.make_commit_text(parents=None))
+ self.assertCheckSucceeds(Commit, self.make_commit_text(encoding=b"UTF-8"))
+
+ self.assertCheckFails(Commit, self.make_commit_text(tree=b"xxx"))
+ self.assertCheckFails(Commit, self.make_commit_text(parents=[a_sha, b"xxx"]))
+ bad_committer = b"some guy without an email address 1174773719 +0000"
+ self.assertCheckFails(Commit, self.make_commit_text(committer=bad_committer))
+ self.assertCheckFails(Commit, self.make_commit_text(author=bad_committer))
+ self.assertCheckFails(Commit, self.make_commit_text(author=None))
+ self.assertCheckFails(Commit, self.make_commit_text(committer=None))
+ self.assertCheckFails(
+ Commit, self.make_commit_text(author=None, committer=None)
+ )
+
+ def test_check_duplicates(self):
+ # duplicate each of the header fields
+ for i in range(5):
+ lines = self.make_commit_lines(parents=[a_sha], encoding=b"UTF-8")
+ lines.insert(i, lines[i])
+ text = b"\n".join(lines)
+ if lines[i].startswith(b"parent"):
+ # duplicate parents are ok for now
+ self.assertCheckSucceeds(Commit, text)
+ else:
+ self.assertCheckFails(Commit, text)
+
+ def test_check_order(self):
+ lines = self.make_commit_lines(parents=[a_sha], encoding=b"UTF-8")
+ headers = lines[:5]
+ rest = lines[5:]
+ # of all possible permutations, ensure only the original succeeds
+ for perm in permutations(headers):
+ perm = list(perm)
+ text = b"\n".join(perm + rest)
+ if perm == headers:
+ self.assertCheckSucceeds(Commit, text)
+ else:
+ self.assertCheckFails(Commit, text)
+
+ def test_check_commit_with_unparseable_time(self):
+ identity_with_wrong_time = (
+ b"Igor Sysoev <igor@sysoev.ru> 18446743887488505614+42707004"
+ )
+
+ # Those fail at reading time
+ self.assertCheckFails(
+ Commit,
+ self.make_commit_text(
+ author=default_committer, committer=identity_with_wrong_time
+ ),
+ )
+ self.assertCheckFails(
+ Commit,
+ self.make_commit_text(
+ author=identity_with_wrong_time, committer=default_committer
+ ),
+ )
+
+ def test_check_commit_with_overflow_date(self):
+ """Date with overflow should raise an ObjectFormatException when checked."""
+ identity_with_wrong_time = (
+ b"Igor Sysoev <igor@sysoev.ru> 18446743887488505614 +42707004"
+ )
+ commit0 = Commit.from_string(
+ self.make_commit_text(
+ author=identity_with_wrong_time, committer=default_committer
+ )
+ )
+ commit1 = Commit.from_string(
+ self.make_commit_text(
+ author=default_committer, committer=identity_with_wrong_time
+ )
+ )
+
+        # These fail when triggering the check() method
+ for commit in [commit0, commit1]:
+ with self.assertRaises(ObjectFormatException):
+ commit.check()
+
+ def test_mangled_author_line(self):
+ """Mangled author line should successfully parse."""
+ author_line = (
+ b'Karl MacMillan <kmacmill@redhat.com> <"Karl MacMillan '
+ b'<kmacmill@redhat.com>"> 1197475547 -0500'
+ )
+ expected_identity = (
+ b'Karl MacMillan <kmacmill@redhat.com> <"Karl MacMillan '
+ b'<kmacmill@redhat.com>">'
+ )
+ commit = Commit.from_string(self.make_commit_text(author=author_line))
+
+ # The commit parses properly
+ self.assertEqual(commit.author, expected_identity)
+
+ # But the check fails because the author identity is bogus
+ with self.assertRaises(ObjectFormatException):
+ commit.check()
+
+ def test_parse_gpgsig(self):
+ c = Commit.from_string(
+ b"""tree aaff74984cccd156a469afa7d9ab10e4777beb24
+author Jelmer Vernooij <jelmer@samba.org> 1412179807 +0200
+committer Jelmer Vernooij <jelmer@samba.org> 1412179807 +0200
+gpgsig -----BEGIN PGP SIGNATURE-----
+ Version: GnuPG v1
+
+ iQIcBAABCgAGBQJULCdfAAoJEACAbyvXKaRXuKwP/RyP9PA49uAvu8tQVCC/uBa8
+ vi975+xvO14R8Pp8k2nps7lSxCdtCd+xVT1VRHs0wNhOZo2YCVoU1HATkPejqSeV
+ NScTHcxnk4/+bxyfk14xvJkNp7FlQ3npmBkA+lbV0Ubr33rvtIE5jiJPyz+SgWAg
+ xdBG2TojV0squj00GoH/euK6aX7GgZtwdtpTv44haCQdSuPGDcI4TORqR6YSqvy3
+ GPE+3ZqXPFFb+KILtimkxitdwB7CpwmNse2vE3rONSwTvi8nq3ZoQYNY73CQGkUy
+ qoFU0pDtw87U3niFin1ZccDgH0bB6624sLViqrjcbYJeg815Htsu4rmzVaZADEVC
+ XhIO4MThebusdk0AcNGjgpf3HRHk0DPMDDlIjm+Oao0cqovvF6VyYmcb0C+RmhJj
+ dodLXMNmbqErwTk3zEkW0yZvNIYXH7m9SokPCZa4eeIM7be62X6h1mbt0/IU6Th+
+ v18fS0iTMP/Viug5und+05C/v04kgDo0CPphAbXwWMnkE4B6Tl9sdyUYXtvQsL7x
+ 0+WP1gL27ANqNZiI07Kz/BhbBAQI/+2TFT7oGr0AnFPQ5jHp+3GpUf6OKuT1wT3H
+ ND189UFuRuubxb42vZhpcXRbqJVWnbECTKVUPsGZqat3enQUB63uM4i6/RdONDZA
+ fDeF1m4qYs+cUXKNUZ03
+ =X6RT
+ -----END PGP SIGNATURE-----
+
+foo
+"""
+ )
+ self.assertEqual(b"foo\n", c.message)
+ self.assertEqual([], c._extra)
+ self.assertEqual(
+ b"""-----BEGIN PGP SIGNATURE-----
+Version: GnuPG v1
+
+iQIcBAABCgAGBQJULCdfAAoJEACAbyvXKaRXuKwP/RyP9PA49uAvu8tQVCC/uBa8
+vi975+xvO14R8Pp8k2nps7lSxCdtCd+xVT1VRHs0wNhOZo2YCVoU1HATkPejqSeV
+NScTHcxnk4/+bxyfk14xvJkNp7FlQ3npmBkA+lbV0Ubr33rvtIE5jiJPyz+SgWAg
+xdBG2TojV0squj00GoH/euK6aX7GgZtwdtpTv44haCQdSuPGDcI4TORqR6YSqvy3
+GPE+3ZqXPFFb+KILtimkxitdwB7CpwmNse2vE3rONSwTvi8nq3ZoQYNY73CQGkUy
+qoFU0pDtw87U3niFin1ZccDgH0bB6624sLViqrjcbYJeg815Htsu4rmzVaZADEVC
+XhIO4MThebusdk0AcNGjgpf3HRHk0DPMDDlIjm+Oao0cqovvF6VyYmcb0C+RmhJj
+dodLXMNmbqErwTk3zEkW0yZvNIYXH7m9SokPCZa4eeIM7be62X6h1mbt0/IU6Th+
+v18fS0iTMP/Viug5und+05C/v04kgDo0CPphAbXwWMnkE4B6Tl9sdyUYXtvQsL7x
+0+WP1gL27ANqNZiI07Kz/BhbBAQI/+2TFT7oGr0AnFPQ5jHp+3GpUf6OKuT1wT3H
+ND189UFuRuubxb42vZhpcXRbqJVWnbECTKVUPsGZqat3enQUB63uM4i6/RdONDZA
+fDeF1m4qYs+cUXKNUZ03
+=X6RT
+-----END PGP SIGNATURE-----""",
+ c.gpgsig,
+ )
+
+ def test_parse_header_trailing_newline(self):
+ c = Commit.from_string(
+ b"""\
+tree a7d6277f78d3ecd0230a1a5df6db00b1d9c521ac
+parent c09b6dec7a73760fbdb478383a3c926b18db8bbe
+author Neil Matatall <oreoshake@github.com> 1461964057 -1000
+committer Neil Matatall <oreoshake@github.com> 1461964057 -1000
+gpgsig -----BEGIN PGP SIGNATURE-----
+
+ wsBcBAABCAAQBQJXI80ZCRA6pcNDcVZ70gAAarcIABs72xRX3FWeox349nh6ucJK
+ CtwmBTusez2Zwmq895fQEbZK7jpaGO5TRO4OvjFxlRo0E08UFx3pxZHSpj6bsFeL
+ hHsDXnCaotphLkbgKKRdGZo7tDqM84wuEDlh4MwNe7qlFC7bYLDyysc81ZX5lpMm
+ 2MFF1TvjLAzSvkT7H1LPkuR3hSvfCYhikbPOUNnKOo0sYjeJeAJ/JdAVQ4mdJIM0
+ gl3REp9+A+qBEpNQI7z94Pg5Bc5xenwuDh3SJgHvJV6zBWupWcdB3fAkVd4TPnEZ
+ nHxksHfeNln9RKseIDcy4b2ATjhDNIJZARHNfr6oy4u3XPW4svRqtBsLoMiIeuI=
+ =ms6q
+ -----END PGP SIGNATURE-----
+
+
+3.3.0 version bump and docs
+"""
+ )
+ self.assertEqual([], c._extra)
+ self.assertEqual(
+ b"""\
+-----BEGIN PGP SIGNATURE-----
+
+wsBcBAABCAAQBQJXI80ZCRA6pcNDcVZ70gAAarcIABs72xRX3FWeox349nh6ucJK
+CtwmBTusez2Zwmq895fQEbZK7jpaGO5TRO4OvjFxlRo0E08UFx3pxZHSpj6bsFeL
+hHsDXnCaotphLkbgKKRdGZo7tDqM84wuEDlh4MwNe7qlFC7bYLDyysc81ZX5lpMm
+2MFF1TvjLAzSvkT7H1LPkuR3hSvfCYhikbPOUNnKOo0sYjeJeAJ/JdAVQ4mdJIM0
+gl3REp9+A+qBEpNQI7z94Pg5Bc5xenwuDh3SJgHvJV6zBWupWcdB3fAkVd4TPnEZ
+nHxksHfeNln9RKseIDcy4b2ATjhDNIJZARHNfr6oy4u3XPW4svRqtBsLoMiIeuI=
+=ms6q
+-----END PGP SIGNATURE-----\n""",
+ c.gpgsig,
+ )
+
+
+_TREE_ITEMS = {
+ b"a.c": (0o100755, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
+ b"a": (stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
+ b"a/c": (stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
+}
+
+_SORTED_TREE_ITEMS = [
+ TreeEntry(b"a.c", 0o100755, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
+ TreeEntry(b"a", stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
+ TreeEntry(b"a/c", stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
+]
+
+
+class TreeTests(ShaFileCheckTests):
+ def test_add(self):
+ myhexsha = b"d80c186a03f423a81b39df39dc87fd269736ca86"
+ x = Tree()
+ x.add(b"myname", 0o100755, myhexsha)
+ self.assertEqual(x[b"myname"], (0o100755, myhexsha))
+ self.assertEqual(b"100755 myname\0" + hex_to_sha(myhexsha), x.as_raw_string())
+
+ def test_simple(self):
+ myhexsha = b"d80c186a03f423a81b39df39dc87fd269736ca86"
+ x = Tree()
+ x[b"myname"] = (0o100755, myhexsha)
+ self.assertEqual(b"100755 myname\0" + hex_to_sha(myhexsha), x.as_raw_string())
+ self.assertEqual(b"100755 myname\0" + hex_to_sha(myhexsha), bytes(x))
+
+ def test_tree_update_id(self):
+ x = Tree()
+ x[b"a.c"] = (0o100755, b"d80c186a03f423a81b39df39dc87fd269736ca86")
+ self.assertEqual(b"0c5c6bc2c081accfbc250331b19e43b904ab9cdd", x.id)
+ x[b"a.b"] = (stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86")
+ self.assertEqual(b"07bfcb5f3ada15bbebdfa3bbb8fd858a363925c8", x.id)
+
+ def test_tree_iteritems_dir_sort(self):
+ x = Tree()
+ for name, item in _TREE_ITEMS.items():
+ x[name] = item
+ self.assertEqual(_SORTED_TREE_ITEMS, x.items())
+
+ def test_tree_items_dir_sort(self):
+ x = Tree()
+ for name, item in _TREE_ITEMS.items():
+ x[name] = item
+ self.assertEqual(_SORTED_TREE_ITEMS, x.items())
+
+ def _do_test_parse_tree(self, parse_tree):
+ dir = os.path.join(os.path.dirname(__file__), "..", "testdata", "trees")
+ o = Tree.from_path(hex_to_filename(dir, tree_sha))
+ self.assertEqual(
+ [(b"a", 0o100644, a_sha), (b"b", 0o100644, b_sha)],
+ list(parse_tree(o.as_raw_string())),
+ )
+ # test a broken tree that has a leading 0 on the file mode
+ broken_tree = b"0100644 foo\0" + hex_to_sha(a_sha)
+
+ def eval_parse_tree(*args, **kwargs):
+ return list(parse_tree(*args, **kwargs))
+
+ self.assertEqual([(b"foo", 0o100644, a_sha)], eval_parse_tree(broken_tree))
+ self.assertRaises(
+ ObjectFormatException, eval_parse_tree, broken_tree, strict=True
+ )
+
+ test_parse_tree = functest_builder(_do_test_parse_tree, _parse_tree_py)
+ test_parse_tree_extension = ext_functest_builder(_do_test_parse_tree, parse_tree)
+
+ def _do_test_sorted_tree_items(self, sorted_tree_items):
+ def do_sort(entries):
+ return list(sorted_tree_items(entries, False))
+
+ actual = do_sort(_TREE_ITEMS)
+ self.assertEqual(_SORTED_TREE_ITEMS, actual)
+ self.assertIsInstance(actual[0], TreeEntry)
+
+ # C/Python implementations may differ in specific error types, but
+ # should all error on invalid inputs.
+ # For example, the C implementation has stricter type checks, so may
+ # raise TypeError where the Python implementation raises
+ # AttributeError.
+ errors = (TypeError, ValueError, AttributeError)
+ self.assertRaises(errors, do_sort, b"foo")
+ self.assertRaises(errors, do_sort, {b"foo": (1, 2, 3)})
+
+ myhexsha = b"d80c186a03f423a81b39df39dc87fd269736ca86"
+ self.assertRaises(errors, do_sort, {b"foo": (b"xxx", myhexsha)})
+ self.assertRaises(errors, do_sort, {b"foo": (0o100755, 12345)})
+
+ test_sorted_tree_items = functest_builder(
+ _do_test_sorted_tree_items, _sorted_tree_items_py
+ )
+ test_sorted_tree_items_extension = ext_functest_builder(
+ _do_test_sorted_tree_items, sorted_tree_items
+ )
+
+ def _do_test_sorted_tree_items_name_order(self, sorted_tree_items):
+ self.assertEqual(
+ [
+ TreeEntry(
+ b"a",
+ stat.S_IFDIR,
+ b"d80c186a03f423a81b39df39dc87fd269736ca86",
+ ),
+ TreeEntry(
+ b"a.c",
+ 0o100755,
+ b"d80c186a03f423a81b39df39dc87fd269736ca86",
+ ),
+ TreeEntry(
+ b"a/c",
+ stat.S_IFDIR,
+ b"d80c186a03f423a81b39df39dc87fd269736ca86",
+ ),
+ ],
+ list(sorted_tree_items(_TREE_ITEMS, True)),
+ )
+
+ test_sorted_tree_items_name_order = functest_builder(
+ _do_test_sorted_tree_items_name_order, _sorted_tree_items_py
+ )
+ test_sorted_tree_items_name_order_extension = ext_functest_builder(
+ _do_test_sorted_tree_items_name_order, sorted_tree_items
+ )
+
+    def test_check(self):
+        """Tree.check() rejects bad filenames, modes, shas and ordering."""
+        t = Tree  # the class itself; assertCheck* helpers take (type, raw bytes)
+        sha = hex_to_sha(a_sha)
+
+        # filenames
+        self.assertCheckSucceeds(t, b"100644 .a\0" + sha)
+        self.assertCheckFails(t, b"100644 \0" + sha)
+        self.assertCheckFails(t, b"100644 .\0" + sha)
+        self.assertCheckFails(t, b"100644 a/a\0" + sha)
+        self.assertCheckFails(t, b"100644 ..\0" + sha)
+        self.assertCheckFails(t, b"100644 .git\0" + sha)
+
+        # modes
+        self.assertCheckSucceeds(t, b"100644 a\0" + sha)
+        self.assertCheckSucceeds(t, b"100755 a\0" + sha)
+        self.assertCheckSucceeds(t, b"160000 a\0" + sha)
+        # TODO more whitelisted modes
+        self.assertCheckFails(t, b"123456 a\0" + sha)
+        self.assertCheckFails(t, b"123abc a\0" + sha)
+        # should fail check, but parses ok (leading zero in the mode)
+        self.assertCheckFails(t, b"0100644 foo\0" + sha)
+
+        # shas (binary shas must be exactly 20 bytes)
+        self.assertCheckFails(t, b"100644 a\0" + (b"x" * 5))
+        self.assertCheckFails(t, b"100644 a\0" + (b"x" * 18) + b"\0")
+        self.assertCheckFails(t, b"100644 a\0" + (b"x" * 21) + b"\n100644 b\0" + sha)
+
+        # ordering: entries must appear in sorted order without duplicates
+        sha2 = hex_to_sha(b_sha)
+        self.assertCheckSucceeds(t, b"100644 a\0" + sha + b"100644 b\0" + sha)
+        self.assertCheckSucceeds(t, b"100644 a\0" + sha + b"100644 b\0" + sha2)
+        self.assertCheckFails(t, b"100644 a\0" + sha + b"100755 a\0" + sha2)
+        self.assertCheckFails(t, b"100644 b\0" + sha2 + b"100644 a\0" + sha)
+
+    def test_iter(self):
+        """Iterating a Tree yields its entry names."""
+        t = Tree()
+        t[b"foo"] = (0o100644, a_sha)
+        self.assertEqual({b"foo"}, set(t))
+
+
+class TagSerializeTests(TestCase):
+    """Tests serializing ``Tag`` objects to their raw byte representation."""
+
+    def test_serialize_simple(self):
+        """A fully-populated tag serializes to the canonical header layout
+        (object, type, tag, tagger, blank line, message)."""
+        x = make_object(
+            Tag,
+            tagger=b"Jelmer Vernooij <jelmer@samba.org>",
+            name=b"0.1",
+            message=b"Tag 0.1",
+            object=(Blob, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
+            tag_time=423423423,
+            tag_timezone=0,
+        )
+        self.assertEqual(
+            (
+                b"object d80c186a03f423a81b39df39dc87fd269736ca86\n"
+                b"type blob\n"
+                b"tag 0.1\n"
+                b"tagger Jelmer Vernooij <jelmer@samba.org> "
+                b"423423423 +0000\n"
+                b"\n"
+                b"Tag 0.1"
+            ),
+            x.as_raw_string(),
+        )
+
+    def test_serialize_none_message(self):
+        """A tag whose message is None omits the blank separator and body."""
+        x = make_object(
+            Tag,
+            tagger=b"Jelmer Vernooij <jelmer@samba.org>",
+            name=b"0.1",
+            message=None,
+            object=(Blob, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
+            tag_time=423423423,
+            tag_timezone=0,
+        )
+        self.assertEqual(
+            (
+                b"object d80c186a03f423a81b39df39dc87fd269736ca86\n"
+                b"type blob\n"
+                b"tag 0.1\n"
+                b"tagger Jelmer Vernooij <jelmer@samba.org> "
+                b"423423423 +0000\n"
+            ),
+            x.as_raw_string(),
+        )
+
+
+# Canonical fixtures used as defaults by TagParseTests.make_tag_lines below
+# (taken from the Linux 2.6.22-rc7 release tag).
+default_tagger = (
+    b"Linus Torvalds <torvalds@woody.linux-foundation.org> " b"1183319674 -0700"
+)
+default_message = b"""Linux 2.6.22-rc7
+-----BEGIN PGP SIGNATURE-----
+Version: GnuPG v1.4.7 (GNU/Linux)
+
+iD8DBQBGiAaAF3YsRnbiHLsRAitMAKCiLboJkQECM/jpYsY3WPfvUgLXkACgg3ql
+OK2XeQOiEeXtT76rV4t2WR4=
+=ivrA
+-----END PGP SIGNATURE-----
+"""
+
+
+class TagParseTests(ShaFileCheckTests):
+    """Parsing and validity (check()) tests for ``Tag`` objects."""
+
+    def make_tag_lines(
+        self,
+        object_sha=b"a38d6181ff27824c79fc7df825164a212eff6a3f",
+        object_type_name=b"commit",
+        name=b"v2.6.22-rc7",
+        tagger=default_tagger,
+        message=default_message,
+    ):
+        """Build the raw lines of a tag; any field passed as None is omitted,
+        which lets tests fabricate malformed tags."""
+        lines = []
+        if object_sha is not None:
+            lines.append(b"object " + object_sha)
+        if object_type_name is not None:
+            lines.append(b"type " + object_type_name)
+        if name is not None:
+            lines.append(b"tag " + name)
+        if tagger is not None:
+            lines.append(b"tagger " + tagger)
+        if message is not None:
+            lines.append(b"")
+            lines.append(message)
+        return lines
+
+    def make_tag_text(self, **kwargs):
+        """Join make_tag_lines(**kwargs) into a raw tag byte string."""
+        return b"\n".join(self.make_tag_lines(**kwargs))
+
+    def test_parse(self):
+        """All fields of a well-formed tag are parsed out."""
+        x = Tag()
+        x.set_raw_string(self.make_tag_text())
+        self.assertEqual(
+            b"Linus Torvalds <torvalds@woody.linux-foundation.org>", x.tagger
+        )
+        self.assertEqual(b"v2.6.22-rc7", x.name)
+        object_type, object_sha = x.object
+        self.assertEqual(b"a38d6181ff27824c79fc7df825164a212eff6a3f", object_sha)
+        self.assertEqual(Commit, object_type)
+        # NOTE(review): datetime.utcfromtimestamp is deprecated since Python
+        # 3.12; consider datetime.fromtimestamp(x.tag_time, tz=timezone.utc).
+        self.assertEqual(
+            datetime.datetime.utcfromtimestamp(x.tag_time),
+            datetime.datetime(2007, 7, 1, 19, 54, 34),
+        )
+        self.assertEqual(-25200, x.tag_timezone)
+
+    def test_parse_no_tagger(self):
+        """A missing tagger line leaves tagger and tag_time as None."""
+        x = Tag()
+        x.set_raw_string(self.make_tag_text(tagger=None))
+        self.assertEqual(None, x.tagger)
+        self.assertEqual(b"v2.6.22-rc7", x.name)
+        self.assertEqual(None, x.tag_time)
+
+    def test_parse_no_message(self):
+        """A missing message leaves message as None; other fields unaffected."""
+        x = Tag()
+        x.set_raw_string(self.make_tag_text(message=None))
+        self.assertEqual(None, x.message)
+        self.assertEqual(
+            b"Linus Torvalds <torvalds@woody.linux-foundation.org>", x.tagger
+        )
+        self.assertEqual(
+            datetime.datetime.utcfromtimestamp(x.tag_time),
+            datetime.datetime(2007, 7, 1, 19, 54, 34),
+        )
+        self.assertEqual(-25200, x.tag_timezone)
+        self.assertEqual(b"v2.6.22-rc7", x.name)
+
+    def test_check(self):
+        """check() requires object/type/tag headers, a valid type name, a
+        well-formed tagger identity with a numeric timestamp, and a valid sha."""
+        self.assertCheckSucceeds(Tag, self.make_tag_text())
+        self.assertCheckFails(Tag, self.make_tag_text(object_sha=None))
+        self.assertCheckFails(Tag, self.make_tag_text(object_type_name=None))
+        self.assertCheckFails(Tag, self.make_tag_text(name=None))
+        self.assertCheckFails(Tag, self.make_tag_text(name=b""))
+        self.assertCheckFails(Tag, self.make_tag_text(object_type_name=b"foobar"))
+        self.assertCheckFails(
+            Tag,
+            self.make_tag_text(
+                tagger=b"some guy without an email address 1183319674 -0700"
+            ),
+        )
+        self.assertCheckFails(
+            Tag,
+            self.make_tag_text(
+                tagger=(
+                    b"Linus Torvalds <torvalds@woody.linux-foundation.org> "
+                    b"Sun 7 Jul 2007 12:54:34 +0700"
+                )
+            ),
+        )
+        self.assertCheckFails(Tag, self.make_tag_text(object_sha=b"xxx"))
+
+    def test_check_tag_with_unparseable_field(self):
+        """A timestamp fused to its timezone ("423423+0000") fails check()."""
+        self.assertCheckFails(
+            Tag,
+            self.make_tag_text(
+                tagger=(
+                    b"Linus Torvalds <torvalds@woody.linux-foundation.org> "
+                    b"423423+0000"
+                )
+            ),
+        )
+
+    def test_check_tag_with_overflow_time(self):
+        """Date with overflow should raise an ObjectFormatException when checked."""
+        author = f"Some Dude <some@dude.org> {MAX_TIME + 1} +0000"
+        tag = Tag.from_string(self.make_tag_text(tagger=(author.encode())))
+        with self.assertRaises(ObjectFormatException):
+            tag.check()
+
+    def test_check_duplicates(self):
+        """Repeating any one of the four header fields fails check()."""
+        # duplicate each of the header fields
+        for i in range(4):
+            lines = self.make_tag_lines()
+            lines.insert(i, lines[i])
+            self.assertCheckFails(Tag, b"\n".join(lines))
+
+    def test_check_order(self):
+        """Headers must appear in the canonical object/type/tag/tagger order."""
+        lines = self.make_tag_lines()
+        headers = lines[:4]
+        rest = lines[4:]
+        # of all possible permutations, ensure only the original succeeds
+        for perm in permutations(headers):
+            perm = list(perm)
+            text = b"\n".join(perm + rest)
+            if perm == headers:
+                self.assertCheckSucceeds(Tag, text)
+            else:
+                self.assertCheckFails(Tag, text)
+
+    # NOTE(review): this is a Tree test — it looks misplaced inside
+    # TagParseTests; consider moving it to the tree test class.
+    def test_tree_copy_after_update(self):
+        """Check Tree.id is correctly updated when the tree is copied after updated."""
+        shas = []
+        tree = Tree()
+        shas.append(tree.id)
+        tree.add(b"data", 0o644, Blob().id)
+        copied = tree.copy()
+        shas.append(tree.id)
+        shas.append(copied.id)
+
+        # The empty tree's id differs from the updated tree's id, and the
+        # copy shares the updated tree's id.
+        self.assertNotIn(shas[0], shas[1:])
+        self.assertEqual(shas[1], shas[2])
+
+
+class CheckTests(TestCase):
+    """Tests for the low-level check_hexsha and check_identity validators."""
+
+    def test_check_hexsha(self):
+        """A hex sha must be exactly 40 hex characters."""
+        check_hexsha(a_sha, "failed to check good sha")
+        self.assertRaises(
+            ObjectFormatException, check_hexsha, b"1" * 39, "sha too short"
+        )
+        self.assertRaises(
+            ObjectFormatException, check_hexsha, b"1" * 41, "sha too long"
+        )
+        self.assertRaises(
+            ObjectFormatException,
+            check_hexsha,
+            b"x" * 40,
+            "invalid characters",
+        )
+
+    def test_check_identity(self):
+        """An identity is "name <email>"; malformed angle brackets, missing
+        parts, reserved bytes and trailing junk are all rejected."""
+        check_identity(
+            b"Dave Borowitz <dborowitz@google.com>",
+            "failed to check good identity",
+        )
+        # An empty name (just " <email>") is acceptable.
+        check_identity(b" <dborowitz@google.com>", "failed to check good identity")
+        self.assertRaises(
+            ObjectFormatException,
+            check_identity,
+            b"<dborowitz@google.com>",
+            "no space before email",
+        )
+        self.assertRaises(
+            ObjectFormatException, check_identity, b"Dave Borowitz", "no email"
+        )
+        self.assertRaises(
+            ObjectFormatException,
+            check_identity,
+            b"Dave Borowitz <dborowitz",
+            "incomplete email",
+        )
+        self.assertRaises(
+            ObjectFormatException,
+            check_identity,
+            b"dborowitz@google.com>",
+            "incomplete email",
+        )
+        self.assertRaises(
+            ObjectFormatException,
+            check_identity,
+            b"Dave Borowitz <<dborowitz@google.com>",
+            "typo",
+        )
+        self.assertRaises(
+            ObjectFormatException,
+            check_identity,
+            b"Dave Borowitz <dborowitz@google.com>>",
+            "typo",
+        )
+        self.assertRaises(
+            ObjectFormatException,
+            check_identity,
+            b"Dave Borowitz <dborowitz@google.com>xxx",
+            "trailing characters",
+        )
+        # NOTE(review): exact duplicate of the previous assertion
+        # ("trailing characters") — consider removing one copy.
+        self.assertRaises(
+            ObjectFormatException,
+            check_identity,
+            b"Dave Borowitz <dborowitz@google.com>xxx",
+            "trailing characters",
+        )
+        self.assertRaises(
+            ObjectFormatException,
+            check_identity,
+            b"Dave<Borowitz <dborowitz@google.com>",
+            "reserved byte in name",
+        )
+        self.assertRaises(
+            ObjectFormatException,
+            check_identity,
+            b"Dave>Borowitz <dborowitz@google.com>",
+            "reserved byte in name",
+        )
+        self.assertRaises(
+            ObjectFormatException,
+            check_identity,
+            b"Dave\0Borowitz <dborowitz@google.com>",
+            "null byte",
+        )
+        self.assertRaises(
+            ObjectFormatException,
+            check_identity,
+            b"Dave\nBorowitz <dborowitz@google.com>",
+            "newline byte",
+        )
+
+
+class TimezoneTests(TestCase):
+    """Round-trip tests for parse_timezone/format_timezone.
+
+    parse_timezone returns (offset_seconds, negative_utc); the second flag
+    distinguishes "-0000" from "+0000".
+    """
+
+    def test_parse_timezone_utc(self):
+        self.assertEqual((0, False), parse_timezone(b"+0000"))
+
+    def test_parse_timezone_utc_negative(self):
+        # "-0000" has zero offset but sets the negative_utc flag.
+        self.assertEqual((0, True), parse_timezone(b"-0000"))
+
+    def test_generate_timezone_utc(self):
+        self.assertEqual(b"+0000", format_timezone(0))
+
+    def test_generate_timezone_utc_negative(self):
+        self.assertEqual(b"-0000", format_timezone(0, True))
+
+    def test_parse_timezone_cet(self):
+        self.assertEqual((60 * 60, False), parse_timezone(b"+0100"))
+
+    def test_format_timezone_cet(self):
+        self.assertEqual(b"+0100", format_timezone(60 * 60))
+
+    def test_format_timezone_pdt(self):
+        self.assertEqual(b"-0400", format_timezone(-4 * 60 * 60))
+
+    def test_parse_timezone_pdt(self):
+        self.assertEqual((-4 * 60 * 60, False), parse_timezone(b"-0400"))
+
+    def test_format_timezone_pdt_half(self):
+        self.assertEqual(b"-0440", format_timezone(int(((-4 * 60) - 40) * 60)))
+
+    def test_format_timezone_double_negative(self):
+        # A positive offset formatted with negative_utc=True yields the
+        # unusual "--700" form; the parse test below round-trips it.
+        self.assertEqual(b"--700", format_timezone(int((7 * 60) * 60), True))
+
+    def test_parse_timezone_pdt_half(self):
+        self.assertEqual((((-4 * 60) - 40) * 60, False), parse_timezone(b"-0440"))
+
+    def test_parse_timezone_double_negative(self):
+        self.assertEqual((int((7 * 60) * 60), False), parse_timezone(b"+700"))
+        self.assertEqual((int((7 * 60) * 60), True), parse_timezone(b"--700"))
+
+
+class ShaFileCopyTests(TestCase):
+    """copy() on each ShaFile subclass yields an equal, distinct instance."""
+
+    def assert_copy(self, orig):
+        """Assert orig.copy() has the right class, equals orig, and is not orig."""
+        oclass = object_class(orig.type_num)
+
+        copy = orig.copy()
+        self.assertIsInstance(copy, oclass)
+        self.assertEqual(copy, orig)
+        self.assertIsNot(copy, orig)
+
+    def test_commit_copy(self):
+        attrs = {
+            "tree": b"d80c186a03f423a81b39df39dc87fd269736ca86",
+            "parents": [
+                b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd",
+                b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6",
+            ],
+            "author": b"James Westby <jw+debian@jameswestby.net>",
+            "committer": b"James Westby <jw+debian@jameswestby.net>",
+            "commit_time": 1174773719,
+            "author_time": 1174773719,
+            "commit_timezone": 0,
+            "author_timezone": 0,
+            "message": b"Merge ../b\n",
+        }
+        commit = make_commit(**attrs)
+        self.assert_copy(commit)
+
+    def test_blob_copy(self):
+        blob = make_object(Blob, data=b"i am a blob")
+        self.assert_copy(blob)
+
+    def test_tree_copy(self):
+        blob = make_object(Blob, data=b"i am a blob")
+        tree = Tree()
+        tree[b"blob"] = (stat.S_IFREG, blob.id)
+        self.assert_copy(tree)
+
+    def test_tag_copy(self):
+        tag = make_object(
+            Tag,
+            name=b"tag",
+            message=b"",
+            tagger=b"Tagger <test@example.com>",
+            tag_time=12345,
+            tag_timezone=0,
+            object=(Commit, b"0" * 40),
+        )
+        self.assert_copy(tag)
+
+
+class ShaFileSerializeTests(TestCase):
+    """`ShaFile` objects only gets serialized once if they haven't changed."""
+
+    @contextmanager
+    def assert_serialization_on_change(
+        self, obj, needs_serialization_after_change=True
+    ):
+        """Context manager around a mutation of obj.
+
+        On entry obj must be clean (_needs_serialization False). The with-body
+        mutates obj. On exit, the dirty flag must match
+        needs_serialization_after_change; recomputing obj.id then clears the
+        flag and must yield an id different from the original.
+        """
+        old_id = obj.id
+        self.assertFalse(obj._needs_serialization)
+
+        yield obj
+
+        if needs_serialization_after_change:
+            self.assertTrue(obj._needs_serialization)
+        else:
+            self.assertFalse(obj._needs_serialization)
+        new_id = obj.id
+        self.assertFalse(obj._needs_serialization)
+        self.assertNotEqual(old_id, new_id)
+
+    def test_commit_serialize(self):
+        attrs = {
+            "tree": b"d80c186a03f423a81b39df39dc87fd269736ca86",
+            "parents": [
+                b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd",
+                b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6",
+            ],
+            "author": b"James Westby <jw+debian@jameswestby.net>",
+            "committer": b"James Westby <jw+debian@jameswestby.net>",
+            "commit_time": 1174773719,
+            "author_time": 1174773719,
+            "commit_timezone": 0,
+            "author_timezone": 0,
+            "message": b"Merge ../b\n",
+        }
+        commit = make_commit(**attrs)
+
+        with self.assert_serialization_on_change(commit):
+            commit.parents = [b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd"]
+
+    def test_blob_serialize(self):
+        blob = make_object(Blob, data=b"i am a blob")
+
+        # Assigning Blob.data does not set the dirty flag (the blob's raw
+        # representation is its data), hence the False expectation here.
+        with self.assert_serialization_on_change(
+            blob, needs_serialization_after_change=False
+        ):
+            blob.data = b"i am another blob"
+
+    def test_tree_serialize(self):
+        blob = make_object(Blob, data=b"i am a blob")
+        tree = Tree()
+        tree[b"blob"] = (stat.S_IFREG, blob.id)
+
+        with self.assert_serialization_on_change(tree):
+            tree[b"blob2"] = (stat.S_IFREG, blob.id)
+
+    def test_tag_serialize(self):
+        tag = make_object(
+            Tag,
+            name=b"tag",
+            message=b"",
+            tagger=b"Tagger <test@example.com>",
+            tag_time=12345,
+            tag_timezone=0,
+            object=(Commit, b"0" * 40),
+        )
+
+        with self.assert_serialization_on_change(tag):
+            tag.message = b"new message"
+
+    def test_tag_serialize_time_error(self):
+        """A tagger field with timestamp fused to timezone fails to serialize."""
+        with self.assertRaises(ObjectFormatException):
+            tag = make_object(
+                Tag,
+                name=b"tag",
+                message=b"some message",
+                tagger=b"Tagger <test@example.com> 1174773719+0000",
+                object=(Commit, b"0" * 40),
+            )
+            tag._deserialize(tag._serialize())
+
+
+class PrettyFormatTreeEntryTests(TestCase):
+    """Tests for the human-readable (ls-tree-like) tree entry formatting."""
+
+    def test_format(self):
+        self.assertEqual(
+            "40000 tree 40820c38cfb182ce6c8b261555410d8382a5918b\tfoo\n",
+            pretty_format_tree_entry(
+                b"foo", 0o40000, b"40820c38cfb182ce6c8b261555410d8382a5918b"
+            ),
+        )
blob - /dev/null
blob + 185c1fe9b84465db4ab3418148e562c98e85716c (mode 644)
--- /dev/null
+++ tests/test_objectspec.py
+# test_objectspec.py -- tests for objectspec.py
+# Copyright (C) 2014 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for revision spec parsing."""
+
+# TODO: Round-trip parse-serialize-parse and serialize-parse-serialize tests.
+
+from dulwich.objects import Blob
+from dulwich.objectspec import (
+ parse_commit,
+ parse_commit_range,
+ parse_object,
+ parse_ref,
+ parse_refs,
+ parse_reftuple,
+ parse_reftuples,
+ parse_tree,
+)
+from dulwich.repo import MemoryRepo
+
+from . import TestCase
+from .utils import build_commit_graph
+
+
+class ParseObjectTests(TestCase):
+    """Test parse_object."""
+
+    def test_nonexistent(self):
+        """Unknown names raise KeyError."""
+        r = MemoryRepo()
+        self.assertRaises(KeyError, parse_object, r, "thisdoesnotexist")
+
+    def test_blob_by_sha(self):
+        """An object can be looked up by its full hex sha."""
+        r = MemoryRepo()
+        b = Blob.from_string(b"Blah")
+        r.object_store.add_object(b)
+        self.assertEqual(b, parse_object(r, b.id))
+
+
+class ParseCommitRangeTests(TestCase):
+    """Test parse_commit_range."""
+
+    def test_nonexistent(self):
+        r = MemoryRepo()
+        self.assertRaises(KeyError, parse_commit_range, r, "thisdoesnotexist")
+
+    def test_commit_by_sha(self):
+        """A single sha parses as a one-commit range."""
+        r = MemoryRepo()
+        c1, c2, c3 = build_commit_graph(r.object_store, [[1], [2, 1], [3, 1, 2]])
+        self.assertEqual([c1], list(parse_commit_range(r, c1.id)))
+
+
+class ParseCommitTests(TestCase):
+    """Test parse_commit."""
+
+    def test_nonexistent(self):
+        r = MemoryRepo()
+        self.assertRaises(KeyError, parse_commit, r, "thisdoesnotexist")
+
+    def test_commit_by_sha(self):
+        r = MemoryRepo()
+        [c1] = build_commit_graph(r.object_store, [[1]])
+        self.assertEqual(c1, parse_commit(r, c1.id))
+
+    def test_commit_by_short_sha(self):
+        """A 10-character sha prefix resolves to the commit."""
+        r = MemoryRepo()
+        [c1] = build_commit_graph(r.object_store, [[1]])
+        self.assertEqual(c1, parse_commit(r, c1.id[:10]))
+
+
+class ParseRefTests(TestCase):
+    """Tests for parse_ref's disambiguation rules.
+
+    The refs container is a plain dict of ref-name -> value; only the keys
+    matter. The ambiguity tests mirror git's lookup order: exact name, then
+    refs/<name>, refs/tags/, refs/heads/, refs/remotes/ and finally
+    refs/remotes/<name>/HEAD.
+    """
+
+    def test_nonexistent(self):
+        r = {}
+        self.assertRaises(KeyError, parse_ref, r, b"thisdoesnotexist")
+
+    def test_ambiguous_ref(self):
+        # Exact name wins over every namespaced alternative.
+        r = {
+            b"ambig1": "bla",
+            b"refs/ambig1": "bla",
+            b"refs/tags/ambig1": "bla",
+            b"refs/heads/ambig1": "bla",
+            b"refs/remotes/ambig1": "bla",
+            b"refs/remotes/ambig1/HEAD": "bla",
+        }
+        self.assertEqual(b"ambig1", parse_ref(r, b"ambig1"))
+
+    def test_ambiguous_ref2(self):
+        # Without an exact match, refs/<name> wins.
+        r = {
+            b"refs/ambig2": "bla",
+            b"refs/tags/ambig2": "bla",
+            b"refs/heads/ambig2": "bla",
+            b"refs/remotes/ambig2": "bla",
+            b"refs/remotes/ambig2/HEAD": "bla",
+        }
+        self.assertEqual(b"refs/ambig2", parse_ref(r, b"ambig2"))
+
+    def test_ambiguous_tag(self):
+        # Tags outrank heads and remotes.
+        r = {
+            b"refs/tags/ambig3": "bla",
+            b"refs/heads/ambig3": "bla",
+            b"refs/remotes/ambig3": "bla",
+            b"refs/remotes/ambig3/HEAD": "bla",
+        }
+        self.assertEqual(b"refs/tags/ambig3", parse_ref(r, b"ambig3"))
+
+    def test_ambiguous_head(self):
+        # Heads outrank remotes.
+        r = {
+            b"refs/heads/ambig4": "bla",
+            b"refs/remotes/ambig4": "bla",
+            b"refs/remotes/ambig4/HEAD": "bla",
+        }
+        self.assertEqual(b"refs/heads/ambig4", parse_ref(r, b"ambig4"))
+
+    def test_ambiguous_remote(self):
+        r = {b"refs/remotes/ambig5": "bla", b"refs/remotes/ambig5/HEAD": "bla"}
+        self.assertEqual(b"refs/remotes/ambig5", parse_ref(r, b"ambig5"))
+
+    def test_ambiguous_remote_head(self):
+        r = {b"refs/remotes/ambig6/HEAD": "bla"}
+        self.assertEqual(b"refs/remotes/ambig6/HEAD", parse_ref(r, b"ambig6"))
+
+    def test_heads_full(self):
+        r = {b"refs/heads/foo": "bla"}
+        self.assertEqual(b"refs/heads/foo", parse_ref(r, b"refs/heads/foo"))
+
+    def test_heads_partial(self):
+        r = {b"refs/heads/foo": "bla"}
+        self.assertEqual(b"refs/heads/foo", parse_ref(r, b"heads/foo"))
+
+    def test_tags_partial(self):
+        r = {b"refs/tags/foo": "bla"}
+        self.assertEqual(b"refs/tags/foo", parse_ref(r, b"tags/foo"))
+
+
+class ParseRefsTests(TestCase):
+    """Tests for parse_refs (the list-of-refs variant of parse_ref)."""
+
+    def test_nonexistent(self):
+        r = {}
+        self.assertRaises(KeyError, parse_refs, r, [b"thisdoesnotexist"])
+
+    def test_head(self):
+        r = {b"refs/heads/foo": "bla"}
+        self.assertEqual([b"refs/heads/foo"], parse_refs(r, [b"foo"]))
+
+    def test_full(self):
+        # A single bytestring (not wrapped in a list) is also accepted.
+        r = {b"refs/heads/foo": "bla"}
+        self.assertEqual([b"refs/heads/foo"], parse_refs(r, b"refs/heads/foo"))
+
+
+class ParseReftupleTests(TestCase):
+    """Tests for parse_reftuple, which resolves a refspec against a source
+    and a target ref container into (src_ref, dst_ref, force)."""
+
+    def test_nonexistent(self):
+        r = {}
+        self.assertRaises(KeyError, parse_reftuple, r, r, b"thisdoesnotexist")
+
+    def test_head(self):
+        r = {b"refs/heads/foo": "bla"}
+        self.assertEqual(
+            (b"refs/heads/foo", b"refs/heads/foo", False),
+            parse_reftuple(r, r, b"foo"),
+        )
+        # A leading "+" sets the force flag.
+        self.assertEqual(
+            (b"refs/heads/foo", b"refs/heads/foo", True),
+            parse_reftuple(r, r, b"+foo"),
+        )
+        self.assertEqual(
+            (b"refs/heads/foo", b"refs/heads/foo", True),
+            parse_reftuple(r, {}, b"+foo"),
+        )
+        # force can also be passed as an explicit argument.
+        self.assertEqual(
+            (b"refs/heads/foo", b"refs/heads/foo", True),
+            parse_reftuple(r, {}, b"foo", True),
+        )
+
+    def test_full(self):
+        r = {b"refs/heads/foo": "bla"}
+        self.assertEqual(
+            (b"refs/heads/foo", b"refs/heads/foo", False),
+            parse_reftuple(r, r, b"refs/heads/foo"),
+        )
+
+    def test_no_left_ref(self):
+        # ":dst" means delete/no-source: src is None.
+        r = {b"refs/heads/foo": "bla"}
+        self.assertEqual(
+            (None, b"refs/heads/foo", False),
+            parse_reftuple(r, r, b":refs/heads/foo"),
+        )
+
+    def test_no_right_ref(self):
+        # "src:" leaves the destination None.
+        r = {b"refs/heads/foo": "bla"}
+        self.assertEqual(
+            (b"refs/heads/foo", None, False),
+            parse_reftuple(r, r, b"refs/heads/foo:"),
+        )
+
+    def test_default_with_string(self):
+        # A str (not bytes) refspec is accepted too.
+        r = {b"refs/heads/foo": "bla"}
+        self.assertEqual(
+            (b"refs/heads/foo", b"refs/heads/foo", False),
+            parse_reftuple(r, r, "foo"),
+        )
+
+
+class ParseReftuplesTests(TestCase):
+    """Tests for parse_reftuples (the list variant of parse_reftuple)."""
+
+    def test_nonexistent(self):
+        r = {}
+        self.assertRaises(KeyError, parse_reftuples, r, r, [b"thisdoesnotexist"])
+
+    def test_head(self):
+        r = {b"refs/heads/foo": "bla"}
+        self.assertEqual(
+            [(b"refs/heads/foo", b"refs/heads/foo", False)],
+            parse_reftuples(r, r, [b"foo"]),
+        )
+
+    def test_full(self):
+        r = {b"refs/heads/foo": "bla"}
+        self.assertEqual(
+            [(b"refs/heads/foo", b"refs/heads/foo", False)],
+            parse_reftuples(r, r, b"refs/heads/foo"),
+        )
+        r = {b"refs/heads/foo": "bla"}
+        # The trailing positional argument forces all tuples.
+        self.assertEqual(
+            [(b"refs/heads/foo", b"refs/heads/foo", True)],
+            parse_reftuples(r, r, b"refs/heads/foo", True),
+        )
+
+
+class ParseTreeTests(TestCase):
+    """Test parse_tree."""
+
+    def test_nonexistent(self):
+        r = MemoryRepo()
+        self.assertRaises(KeyError, parse_tree, r, "thisdoesnotexist")
+
+    def test_from_commit(self):
+        """A commit sha or a tree sha both resolve to the tree object."""
+        r = MemoryRepo()
+        c1, c2, c3 = build_commit_graph(r.object_store, [[1], [2, 1], [3, 1, 2]])
+        self.assertEqual(r[c1.tree], parse_tree(r, c1.id))
+        self.assertEqual(r[c1.tree], parse_tree(r, c1.tree))
+
+    def test_from_ref(self):
+        """A ref name resolves through the commit to its tree."""
+        r = MemoryRepo()
+        c1, c2, c3 = build_commit_graph(r.object_store, [[1], [2, 1], [3, 1, 2]])
+        r.refs[b"refs/heads/foo"] = c1.id
+        self.assertEqual(r[c1.tree], parse_tree(r, b"foo"))
blob - /dev/null
blob + c796657042e5c56de556eed051de27fafc1be7e2 (mode 644)
--- /dev/null
+++ tests/test_pack.py
+# test_pack.py -- Tests for the handling of git packs.
+# Copyright (C) 2007 James Westby <jw+debian@jameswestby.net>
+# Copyright (C) 2008 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for Dulwich packs."""
+
+import os
+import shutil
+import sys
+import tempfile
+import zlib
+from hashlib import sha1
+from io import BytesIO
+from typing import Set
+
+from dulwich.errors import ApplyDeltaError, ChecksumMismatch
+from dulwich.file import GitFile
+from dulwich.object_store import MemoryObjectStore
+from dulwich.objects import Blob, Commit, Tree, hex_to_sha, sha_to_hex
+from dulwich.pack import (
+ OFS_DELTA,
+ REF_DELTA,
+ DeltaChainIterator,
+ MemoryPackIndex,
+ Pack,
+ PackData,
+ PackStreamReader,
+ UnpackedObject,
+ UnresolvedDeltas,
+ _delta_encode_size,
+ _encode_copy_operation,
+ apply_delta,
+ compute_file_sha,
+ create_delta,
+ deltify_pack_objects,
+ load_pack_index,
+ read_zlib_chunks,
+ unpack_object,
+ write_pack,
+ write_pack_header,
+ write_pack_index_v1,
+ write_pack_index_v2,
+ write_pack_object,
+)
+
+from . import TestCase
+from .utils import build_pack, make_object
+
+# Name of the pack fixture used throughout; a_sha/tree_sha/commit_sha are
+# objects stored inside it (see PackIndexTests.test_object_offset).
+pack1_sha = b"bc63ddad95e7321ee734ea11a7a62d314e0d7481"
+
+a_sha = b"6f670c0fb53f9463760b7295fbb814e965fb20c8"
+tree_sha = b"b2a2766a2879c209ab1176e7e778b81ae422eeaa"
+commit_sha = b"f18faa16531ac570a3fdc8c7ca16682548dafd12"
+# Expected mode of freshly written index files (Windows reports 0o100666).
+indexmode = "0o100644" if sys.platform != "win32" else "0o100666"
+
+
+class PackTests(TestCase):
+    """Base class for testing packs."""
+
+    def setUp(self):
+        """Create a scratch directory, removed automatically on teardown."""
+        super().setUp()
+        self.tempdir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, self.tempdir)
+
+    # Directory holding the pre-built pack fixtures
+    # (../testdata/packs relative to this module).
+    datadir = os.path.abspath(
+        os.path.join(os.path.dirname(__file__), "../testdata/packs")
+    )
+
+    def get_pack_index(self, sha):
+        """Returns a PackIndex from the datadir with the given sha."""
+        return load_pack_index(
+            os.path.join(self.datadir, "pack-%s.idx" % sha.decode("ascii"))
+        )
+
+    def get_pack_data(self, sha):
+        """Returns a PackData object from the datadir with the given sha."""
+        return PackData(
+            os.path.join(self.datadir, "pack-%s.pack" % sha.decode("ascii"))
+        )
+
+    def get_pack(self, sha):
+        """Return the full Pack (index + data) fixture named by hex sha."""
+        return Pack(os.path.join(self.datadir, "pack-%s" % sha.decode("ascii")))
+
+    def assertSucceeds(self, func, *args, **kwargs):
+        """Assert that func(*args, **kwargs) does not raise ChecksumMismatch."""
+        try:
+            func(*args, **kwargs)
+        except ChecksumMismatch as e:
+            self.fail(e)
+
+
+class PackIndexTests(PackTests):
+    """Class that tests the index of packfiles."""
+
+    def test_object_offset(self):
+        """Tests that the correct object offset is returned from the index."""
+        p = self.get_pack_index(pack1_sha)
+        # pack1_sha names the pack itself, not an object inside it.
+        self.assertRaises(KeyError, p.object_offset, pack1_sha)
+        self.assertEqual(p.object_offset(a_sha), 178)
+        self.assertEqual(p.object_offset(tree_sha), 138)
+        self.assertEqual(p.object_offset(commit_sha), 12)
+
+    def test_object_sha1(self):
+        """Tests the reverse lookup: offset -> binary sha."""
+        p = self.get_pack_index(pack1_sha)
+        self.assertRaises(KeyError, p.object_sha1, 876)
+        self.assertEqual(p.object_sha1(178), hex_to_sha(a_sha))
+        self.assertEqual(p.object_sha1(138), hex_to_sha(tree_sha))
+        self.assertEqual(p.object_sha1(12), hex_to_sha(commit_sha))
+
+    def test_index_len(self):
+        p = self.get_pack_index(pack1_sha)
+        self.assertEqual(3, len(p))
+
+    def test_get_stored_checksum(self):
+        """The index stores its own checksum and the pack's checksum."""
+        p = self.get_pack_index(pack1_sha)
+        self.assertEqual(
+            b"f2848e2ad16f329ae1c92e3b95e91888daa5bd01",
+            sha_to_hex(p.get_stored_checksum()),
+        )
+        self.assertEqual(
+            b"721980e866af9a5f93ad674144e1459b8ba3e7b7",
+            sha_to_hex(p.get_pack_checksum()),
+        )
+
+    def test_index_check(self):
+        p = self.get_pack_index(pack1_sha)
+        self.assertSucceeds(p.check)
+
+    def test_iterentries(self):
+        """iterentries yields (binary sha, offset, crc32) tuples; this v1
+        index has no CRCs, hence the Nones."""
+        p = self.get_pack_index(pack1_sha)
+        entries = [(sha_to_hex(s), o, c) for s, o, c in p.iterentries()]
+        self.assertEqual(
+            [
+                (b"6f670c0fb53f9463760b7295fbb814e965fb20c8", 178, None),
+                (b"b2a2766a2879c209ab1176e7e778b81ae422eeaa", 138, None),
+                (b"f18faa16531ac570a3fdc8c7ca16682548dafd12", 12, None),
+            ],
+            entries,
+        )
+
+    def test_iter(self):
+        p = self.get_pack_index(pack1_sha)
+        self.assertEqual({tree_sha, commit_sha, a_sha}, set(p))
+
+
+class TestPackDeltas(TestCase):
+    """Round-trip and edge-case tests for create_delta/apply_delta."""
+
+    test_string1 = b"The answer was flailing in the wind"
+    test_string2 = b"The answer was falling down the pipe"
+    test_string3 = b"zzzzz"
+
+    test_string_empty = b""
+    test_string_big = b"Z" * 8192
+    test_string_huge = b"Z" * 100000
+
+    def _do_test_roundtrip(self, base, target):
+        """Delta from base to target, applied to base, reproduces target."""
+        self.assertEqual(
+            target, b"".join(apply_delta(base, list(create_delta(base, target))))
+        )
+
+    def test_nochange(self):
+        self._test_roundtrip(self.test_string1, self.test_string1)
+
+    def test_nochange_huge(self):
+        self._test_roundtrip(self.test_string_huge, self.test_string_huge)
+
+    def test_change(self):
+        self._test_roundtrip(self.test_string1, self.test_string2)
+
+    def test_rewrite(self):
+        self._test_roundtrip(self.test_string1, self.test_string3)
+
+    def test_empty_to_big(self):
+        self._test_roundtrip(self.test_string_empty, self.test_string_big)
+
+    def test_empty_to_huge(self):
+        self._test_roundtrip(self.test_string_empty, self.test_string_huge)
+
+    def test_huge_copy(self):
+        self._test_roundtrip(
+            self.test_string_huge + self.test_string1,
+            self.test_string_huge + self.test_string2,
+        )
+
+    def test_dest_overflow(self):
+        """Deltas whose declared sizes don't match the data raise
+        ApplyDeltaError instead of over/under-writing."""
+        self.assertRaises(
+            ApplyDeltaError,
+            apply_delta,
+            b"a" * 0x10000,
+            b"\x80\x80\x04\x80\x80\x04\x80" + b"a" * 0x10000,
+        )
+        self.assertRaises(
+            ApplyDeltaError, apply_delta, b"", b"\x00\x80\x02\xb0\x11\x11"
+        )
+
+    def test_pypy_issue(self):
+        # Test for https://github.com/jelmer/dulwich/issues/509 /
+        # https://bitbucket.org/pypy/pypy/issues/2499/cpyext-pystring_asstring-doesnt-work
+        chunks = [
+            b"tree 03207ccf58880a748188836155ceed72f03d65d6\n"
+            b"parent 408fbab530fd4abe49249a636a10f10f44d07a21\n"
+            b"author Victor Stinner <victor.stinner@gmail.com> "
+            b"1421355207 +0100\n"
+            b"committer Victor Stinner <victor.stinner@gmail.com> "
+            b"1421355207 +0100\n"
+            b"\n"
+            b"Backout changeset 3a06020af8cf\n"
+            b"\nStreamWriter: close() now clears the reference to the "
+            b"transport\n"
+            b"\nStreamWriter now raises an exception if it is closed: "
+            b"write(), writelines(),\n"
+            b"write_eof(), can_write_eof(), get_extra_info(), drain().\n"
+        ]
+        delta = [
+            b"\xcd\x03\xad\x03]tree ff3c181a393d5a7270cddc01ea863818a8621ca8\n"
+            b"parent 20a103cc90135494162e819f98d0edfc1f1fba6b\x91]7\x0510738"
+            b"\x91\x99@\x0b10738 +0100\x93\x04\x01\xc9"
+        ]
+        res = apply_delta(chunks, delta)
+        expected = [
+            b"tree ff3c181a393d5a7270cddc01ea863818a8621ca8\n"
+            b"parent 20a103cc90135494162e819f98d0edfc1f1fba6b",
+            b"\nauthor Victor Stinner <victor.stinner@gmail.com> 14213",
+            b"10738",
+            b" +0100\ncommitter Victor Stinner <victor.stinner@gmail.com> " b"14213",
+            b"10738 +0100",
+            b"\n\nStreamWriter: close() now clears the reference to the "
+            b"transport\n\n"
+            b"StreamWriter now raises an exception if it is closed: "
+            b"write(), writelines(),\n"
+            b"write_eof(), can_write_eof(), get_extra_info(), drain().\n",
+        ]
+        self.assertEqual(b"".join(expected), b"".join(res))
+
+
+class TestPackData(PackTests):
+    """Tests getting the data from the packfile."""
+
+    def test_create_pack(self):
+        self.get_pack_data(pack1_sha).close()
+
+    def test_from_file(self):
+        """PackData can be constructed from an open file plus its size."""
+        path = os.path.join(self.datadir, "pack-%s.pack" % pack1_sha.decode("ascii"))
+        with open(path, "rb") as f:
+            PackData.from_file(f, os.path.getsize(path))
+
+    def test_pack_len(self):
+        with self.get_pack_data(pack1_sha) as p:
+            self.assertEqual(3, len(p))
+
+    def test_index_check(self):
+        with self.get_pack_data(pack1_sha) as p:
+            self.assertSucceeds(p.check)
+
+    def test_iter_unpacked(self):
+        """iter_unpacked yields raw UnpackedObjects in pack order
+        (commit at offset 12, tree at 138, blob at 178)."""
+        with self.get_pack_data(pack1_sha) as p:
+            commit_data = (
+                b"tree b2a2766a2879c209ab1176e7e778b81ae422eeaa\n"
+                b"author James Westby <jw+debian@jameswestby.net> "
+                b"1174945067 +0100\n"
+                b"committer James Westby <jw+debian@jameswestby.net> "
+                b"1174945067 +0100\n"
+                b"\n"
+                b"Test commit\n"
+            )
+            blob_sha = b"6f670c0fb53f9463760b7295fbb814e965fb20c8"
+            tree_data = b"100644 a\0" + hex_to_sha(blob_sha)
+            actual = list(p.iter_unpacked())
+            self.assertEqual(
+                [
+                    UnpackedObject(
+                        offset=12,
+                        pack_type_num=1,
+                        decomp_chunks=[commit_data],
+                        crc32=None,
+                    ),
+                    UnpackedObject(
+                        offset=138,
+                        pack_type_num=2,
+                        decomp_chunks=[tree_data],
+                        crc32=None,
+                    ),
+                    UnpackedObject(
+                        offset=178,
+                        pack_type_num=3,
+                        decomp_chunks=[b"test 1\n"],
+                        crc32=None,
+                    ),
+                ],
+                actual,
+            )
+
+    def test_iterentries(self):
+        """Unlike the index, PackData.iterentries computes CRC32s."""
+        with self.get_pack_data(pack1_sha) as p:
+            entries = {(sha_to_hex(s), o, c) for s, o, c in p.iterentries()}
+            self.assertEqual(
+                {
+                    (
+                        b"6f670c0fb53f9463760b7295fbb814e965fb20c8",
+                        178,
+                        1373561701,
+                    ),
+                    (
+                        b"b2a2766a2879c209ab1176e7e778b81ae422eeaa",
+                        138,
+                        912998690,
+                    ),
+                    (
+                        b"f18faa16531ac570a3fdc8c7ca16682548dafd12",
+                        12,
+                        3775879613,
+                    ),
+                },
+                entries,
+            )
+
+    def test_create_index_v1(self):
+        """A regenerated v1 index equals the shipped fixture index."""
+        with self.get_pack_data(pack1_sha) as p:
+            filename = os.path.join(self.tempdir, "v1test.idx")
+            p.create_index_v1(filename)
+            idx1 = load_pack_index(filename)
+            idx2 = self.get_pack_index(pack1_sha)
+            self.assertEqual(oct(os.stat(filename).st_mode), indexmode)
+            self.assertEqual(idx1, idx2)
+
+    def test_create_index_v2(self):
+        """A regenerated v2 index equals the shipped fixture index."""
+        with self.get_pack_data(pack1_sha) as p:
+            filename = os.path.join(self.tempdir, "v2test.idx")
+            p.create_index_v2(filename)
+            idx1 = load_pack_index(filename)
+            idx2 = self.get_pack_index(pack1_sha)
+            self.assertEqual(oct(os.stat(filename).st_mode), indexmode)
+            self.assertEqual(idx1, idx2)
+
+    def test_compute_file_sha(self):
+        """compute_file_sha honours buffer_size and start/end offsets
+        (negative end_ofs counts back from the end of the file)."""
+        f = BytesIO(b"abcd1234wxyz")
+        self.assertEqual(
+            sha1(b"abcd1234wxyz").hexdigest(), compute_file_sha(f).hexdigest()
+        )
+        self.assertEqual(
+            sha1(b"abcd1234wxyz").hexdigest(),
+            compute_file_sha(f, buffer_size=5).hexdigest(),
+        )
+        self.assertEqual(
+            sha1(b"abcd1234").hexdigest(),
+            compute_file_sha(f, end_ofs=-4).hexdigest(),
+        )
+        self.assertEqual(
+            sha1(b"1234wxyz").hexdigest(),
+            compute_file_sha(f, start_ofs=4).hexdigest(),
+        )
+        self.assertEqual(
+            sha1(b"1234").hexdigest(),
+            compute_file_sha(f, start_ofs=4, end_ofs=-4).hexdigest(),
+        )
+
+    def test_compute_file_sha_short_file(self):
+        """Offsets outside the file raise AssertionError."""
+        f = BytesIO(b"abcd1234wxyz")
+        self.assertRaises(AssertionError, compute_file_sha, f, end_ofs=-20)
+        self.assertRaises(AssertionError, compute_file_sha, f, end_ofs=20)
+        self.assertRaises(
+            AssertionError, compute_file_sha, f, start_ofs=10, end_ofs=-12
+        )
+
+
class TestPack(PackTests):
    """Tests for the high-level Pack API against the pregenerated pack-1."""

    def test_len(self):
        with self.get_pack(pack1_sha) as p:
            self.assertEqual(3, len(p))

    def test_contains(self):
        with self.get_pack(pack1_sha) as p:
            self.assertIn(tree_sha, p)

    def test_get(self):
        with self.get_pack(pack1_sha) as p:
            self.assertEqual(type(p[tree_sha]), Tree)

    def test_iter(self):
        with self.get_pack(pack1_sha) as p:
            self.assertEqual({tree_sha, commit_sha, a_sha}, set(p))

    def test_iterobjects(self):
        with self.get_pack(pack1_sha) as p:
            expected = {p[s] for s in [commit_sha, tree_sha, a_sha]}
            self.assertEqual(expected, set(list(p.iterobjects())))

    def test_pack_tuples(self):
        with self.get_pack(pack1_sha) as p:
            tuples = p.pack_tuples()
            expected = {(p[s], None) for s in [commit_sha, tree_sha, a_sha]}
            # Asserted twice on purpose: the tuples object must be
            # re-iterable, not a one-shot generator.
            self.assertEqual(expected, set(list(tuples)))
            self.assertEqual(expected, set(list(tuples)))
            self.assertEqual(3, len(tuples))

    def test_get_object_at(self):
        """Tests random access for non-delta objects."""
        with self.get_pack(pack1_sha) as p:
            obj = p[a_sha]
            self.assertEqual(obj.type_name, b"blob")
            self.assertEqual(obj.sha().hexdigest().encode("ascii"), a_sha)
            obj = p[tree_sha]
            self.assertEqual(obj.type_name, b"tree")
            self.assertEqual(obj.sha().hexdigest().encode("ascii"), tree_sha)
            obj = p[commit_sha]
            self.assertEqual(obj.type_name, b"commit")
            self.assertEqual(obj.sha().hexdigest().encode("ascii"), commit_sha)

    def test_copy(self):
        """Round-tripping a pack through write_pack preserves its contents."""
        with self.get_pack(pack1_sha) as origpack:
            self.assertSucceeds(origpack.index.check)
            basename = os.path.join(self.tempdir, "Elch")
            write_pack(basename, origpack.pack_tuples())

            with Pack(basename) as newpack:
                self.assertEqual(origpack, newpack)
                self.assertSucceeds(newpack.index.check)
                self.assertEqual(origpack.name(), newpack.name())
                self.assertEqual(
                    origpack.index.get_pack_checksum(),
                    newpack.index.get_pack_checksum(),
                )

                # Stored checksums only need to match when the index
                # versions are the same.
                wrong_version = origpack.index.version != newpack.index.version
                orig_checksum = origpack.index.get_stored_checksum()
                new_checksum = newpack.index.get_stored_checksum()
                self.assertTrue(wrong_version or orig_checksum == new_checksum)

    def test_commit_obj(self):
        with self.get_pack(pack1_sha) as p:
            commit = p[commit_sha]
            self.assertEqual(b"James Westby <jw+debian@jameswestby.net>", commit.author)
            self.assertEqual([], commit.parents)

    def _copy_pack(self, origpack):
        # Helper: write origpack to a new file in tempdir and open it.
        basename = os.path.join(self.tempdir, "somepack")
        write_pack(basename, origpack.pack_tuples())
        return Pack(basename)

    def test_keep_no_message(self):
        with self.get_pack(pack1_sha) as p:
            p = self._copy_pack(p)

        with p:
            keepfile_name = p.keep()

        # file should exist
        self.assertTrue(os.path.exists(keepfile_name))

        with open(keepfile_name) as f:
            buf = f.read()
            self.assertEqual("", buf)

    def test_keep_message(self):
        with self.get_pack(pack1_sha) as p:
            p = self._copy_pack(p)

        msg = b"some message"
        with p:
            keepfile_name = p.keep(msg)

        # file should exist
        self.assertTrue(os.path.exists(keepfile_name))

        # and contain the right message, with a linefeed
        with open(keepfile_name, "rb") as f:
            buf = f.read()
            self.assertEqual(msg + b"\n", buf)

    def test_name(self):
        with self.get_pack(pack1_sha) as p:
            self.assertEqual(pack1_sha, p.name())

    def test_length_mismatch(self):
        """A pack header advertising the wrong object count must be rejected."""
        with self.get_pack_data(pack1_sha) as data:
            index = self.get_pack_index(pack1_sha)
            Pack.from_objects(data, index).check_length_and_checksum()

            # Rebuild the pack with a bogus object count (9999) in the header.
            data._file.seek(12)
            bad_file = BytesIO()
            write_pack_header(bad_file.write, 9999)
            bad_file.write(data._file.read())
            bad_file = BytesIO(bad_file.getvalue())
            bad_data = PackData("", file=bad_file)
            bad_pack = Pack.from_lazy_objects(lambda: bad_data, lambda: index)
            self.assertRaises(AssertionError, lambda: bad_pack.data)
            self.assertRaises(AssertionError, bad_pack.check_length_and_checksum)

    def test_checksum_mismatch(self):
        """A pack with a corrupted trailing SHA-1 must be rejected."""
        with self.get_pack_data(pack1_sha) as data:
            index = self.get_pack_index(pack1_sha)
            Pack.from_objects(data, index).check_length_and_checksum()

            # Replace the 20-byte trailing checksum with 0xff bytes.
            data._file.seek(0)
            bad_file = BytesIO(data._file.read()[:-20] + (b"\xff" * 20))
            bad_data = PackData("", file=bad_file)
            bad_pack = Pack.from_lazy_objects(lambda: bad_data, lambda: index)
            self.assertRaises(ChecksumMismatch, lambda: bad_pack.data)
            self.assertRaises(ChecksumMismatch, bad_pack.check_length_and_checksum)

    def test_iterobjects_2(self):
        with self.get_pack(pack1_sha) as p:
            objs = {o.id: o for o in p.iterobjects()}
            self.assertEqual(3, len(objs))
            self.assertEqual(sorted(objs), sorted(p.index))
            self.assertIsInstance(objs[a_sha], Blob)
            self.assertIsInstance(objs[tree_sha], Tree)
            self.assertIsInstance(objs[commit_sha], Commit)

    def test_iterobjects_subset(self):
        with self.get_pack(pack1_sha) as p:
            objs = {o.id: o for o in p.iterobjects_subset([commit_sha])}
            self.assertEqual(1, len(objs))
            self.assertIsInstance(objs[commit_sha], Commit)
+
+
class TestThinPack(PackTests):
    """Tests for thin packs, whose deltas may reference external objects."""

    def setUp(self):
        super().setUp()
        self.store = MemoryObjectStore()
        self.blobs = {}
        for blob in (b"foo", b"bar", b"foo1234", b"bar2468"):
            self.blobs[blob] = make_object(Blob, data=blob)
        self.store.add_object(self.blobs[b"foo"])
        self.store.add_object(self.blobs[b"bar"])

        # Build a thin pack. 'foo' is as an external reference, 'bar' an
        # internal reference.
        self.pack_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.pack_dir)
        self.pack_prefix = os.path.join(self.pack_dir, "pack")

        with open(self.pack_prefix + ".pack", "wb") as f:
            build_pack(
                f,
                [
                    (REF_DELTA, (self.blobs[b"foo"].id, b"foo1234")),
                    (Blob.type_num, b"bar"),
                    (REF_DELTA, (self.blobs[b"bar"].id, b"bar2468")),
                ],
                store=self.store,
            )

        # Index the new pack.
        with self.make_pack(True) as pack:
            with PackData(pack._data_path) as data:
                data.create_index(
                    self.pack_prefix + ".idx", resolve_ext_ref=pack.resolve_ext_ref
                )

        # Drop 'bar' from the store: it lives in the pack now.
        del self.store[self.blobs[b"bar"].id]

    def make_pack(self, resolve_ext_ref):
        # Open the pack, optionally wiring external-ref resolution to the store.
        return Pack(
            self.pack_prefix,
            resolve_ext_ref=self.store.get_raw if resolve_ext_ref else None,
        )

    def test_get_raw(self):
        # Without ext-ref resolution the delta against 'foo' cannot resolve.
        with self.make_pack(False) as p:
            self.assertRaises(KeyError, p.get_raw, self.blobs[b"foo1234"].id)
        with self.make_pack(True) as p:
            self.assertEqual((3, b"foo1234"), p.get_raw(self.blobs[b"foo1234"].id))

    def test_get_unpacked_object(self):
        """get_unpacked_object returns the raw delta either way."""
        self.maxDiff = None
        with self.make_pack(False) as p:
            expected = UnpackedObject(
                7,
                delta_base=b"\x19\x10(\x15f=#\xf8\xb7ZG\xe7\xa0\x19e\xdc\xdc\x96F\x8c",
                decomp_chunks=[b"\x03\x07\x90\x03\x041234"],
            )
            expected.offset = 12
            got = p.get_unpacked_object(self.blobs[b"foo1234"].id)
            self.assertEqual(expected, got)
        with self.make_pack(True) as p:
            expected = UnpackedObject(
                7,
                delta_base=b"\x19\x10(\x15f=#\xf8\xb7ZG\xe7\xa0\x19e\xdc\xdc\x96F\x8c",
                decomp_chunks=[b"\x03\x07\x90\x03\x041234"],
            )
            expected.offset = 12
            got = p.get_unpacked_object(self.blobs[b"foo1234"].id)
            self.assertEqual(
                expected,
                got,
            )

    def test_iterobjects(self):
        # Iteration over a thin pack requires ext-ref resolution.
        with self.make_pack(False) as p:
            self.assertRaises(UnresolvedDeltas, list, p.iterobjects())
        with self.make_pack(True) as p:
            self.assertEqual(
                sorted(
                    [
                        self.blobs[b"foo1234"].id,
                        self.blobs[b"bar"].id,
                        self.blobs[b"bar2468"].id,
                    ]
                ),
                sorted(o.id for o in p.iterobjects()),
            )
+
+
class WritePackTests(TestCase):
    """Tests for the low-level pack writing helpers."""

    def test_write_pack_header(self):
        f = BytesIO()
        write_pack_header(f.write, 42)
        # "PACK", version 2, 42 objects, all big-endian.
        self.assertEqual(b"PACK\x00\x00\x00\x02\x00\x00\x00*", f.getvalue())

    def test_write_pack_object(self):
        f = BytesIO()
        f.write(b"header")
        offset = f.tell()
        crc32 = write_pack_object(f.write, Blob.type_num, b"blob")
        # CRC covers only the object bytes, not the 6-byte prefix.
        self.assertEqual(crc32, zlib.crc32(f.getvalue()[6:]) & 0xFFFFFFFF)

        f.write(b"x")  # unpack_object needs extra trailing data.
        f.seek(offset)
        unpacked, unused = unpack_object(f.read, compute_crc32=True)
        self.assertEqual(Blob.type_num, unpacked.pack_type_num)
        self.assertEqual(Blob.type_num, unpacked.obj_type_num)
        self.assertEqual([b"blob"], unpacked.decomp_chunks)
        self.assertEqual(crc32, unpacked.crc32)
        self.assertEqual(b"x", unused)

    def test_write_pack_object_sha(self):
        # The sha argument is updated in place with the written bytes.
        f = BytesIO()
        f.write(b"header")
        offset = f.tell()
        sha_a = sha1(b"foo")
        sha_b = sha_a.copy()
        write_pack_object(f.write, Blob.type_num, b"blob", sha=sha_a)
        self.assertNotEqual(sha_a.digest(), sha_b.digest())
        sha_b.update(f.getvalue()[offset:])
        self.assertEqual(sha_a.digest(), sha_b.digest())

    def test_write_pack_object_compression_level(self):
        # Same as above, but with an explicit zlib compression level.
        f = BytesIO()
        f.write(b"header")
        offset = f.tell()
        sha_a = sha1(b"foo")
        sha_b = sha_a.copy()
        write_pack_object(
            f.write, Blob.type_num, b"blob", sha=sha_a, compression_level=6
        )
        self.assertNotEqual(sha_a.digest(), sha_b.digest())
        sha_b.update(f.getvalue()[offset:])
        self.assertEqual(sha_a.digest(), sha_b.digest())
+
+
+pack_checksum = hex_to_sha("721980e866af9a5f93ad674144e1459b8ba3e7b7")
+
+
class BaseTestPackIndexWriting:
    """Shared index-writing tests; mixed into concrete TestCase subclasses.

    Subclasses set _supports_large and _has_crc32_checksum and implement
    index().
    """

    def assertSucceeds(self, func, *args, **kwargs):
        # Like a plain call, but converts ChecksumMismatch into a failure.
        try:
            func(*args, **kwargs)
        except ChecksumMismatch as e:
            self.fail(e)

    def index(self, filename, entries, pack_checksum):
        # Abstract: build and return an index for the given entries.
        raise NotImplementedError(self.index)

    def test_empty(self):
        idx = self.index("empty.idx", [], pack_checksum)
        self.assertEqual(idx.get_pack_checksum(), pack_checksum)
        self.assertEqual(0, len(idx))

    def test_large(self):
        # Offsets above 2**32 only round-trip on formats that support them.
        entry1_sha = hex_to_sha("4e6388232ec39792661e2e75db8fb117fc869ce6")
        entry2_sha = hex_to_sha("e98f071751bd77f59967bfa671cd2caebdccc9a2")
        entries = [
            (entry1_sha, 0xF2972D0830529B87, 24),
            (entry2_sha, (~0xF2972D0830529B87) & (2**64 - 1), 92),
        ]
        if not self._supports_large:
            self.assertRaises(
                TypeError, self.index, "single.idx", entries, pack_checksum
            )
            return
        idx = self.index("single.idx", entries, pack_checksum)
        self.assertEqual(idx.get_pack_checksum(), pack_checksum)
        self.assertEqual(2, len(idx))
        actual_entries = list(idx.iterentries())
        self.assertEqual(len(entries), len(actual_entries))
        for mine, actual in zip(entries, actual_entries):
            my_sha, my_offset, my_crc = mine
            actual_sha, actual_offset, actual_crc = actual
            self.assertEqual(my_sha, actual_sha)
            self.assertEqual(my_offset, actual_offset)
            if self._has_crc32_checksum:
                self.assertEqual(my_crc, actual_crc)
            else:
                self.assertIsNone(actual_crc)

    def test_single(self):
        entry_sha = hex_to_sha("6f670c0fb53f9463760b7295fbb814e965fb20c8")
        my_entries = [(entry_sha, 178, 42)]
        idx = self.index("single.idx", my_entries, pack_checksum)
        self.assertEqual(idx.get_pack_checksum(), pack_checksum)
        self.assertEqual(1, len(idx))
        actual_entries = list(idx.iterentries())
        self.assertEqual(len(my_entries), len(actual_entries))
        for mine, actual in zip(my_entries, actual_entries):
            my_sha, my_offset, my_crc = mine
            actual_sha, actual_offset, actual_crc = actual
            self.assertEqual(my_sha, actual_sha)
            self.assertEqual(my_offset, actual_offset)
            if self._has_crc32_checksum:
                self.assertEqual(my_crc, actual_crc)
            else:
                self.assertIsNone(actual_crc)
+
+
class BaseTestFilePackIndexWriting(BaseTestPackIndexWriting):
    """Index-writing mixin that goes via an on-disk file.

    Subclasses set _write_fn and _expected_version.
    """

    def setUp(self):
        self.tempdir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tempdir)

    def index(self, filename, entries, pack_checksum):
        # Write the index to disk, reload it, and sanity-check it.
        path = os.path.join(self.tempdir, filename)
        self.writeIndex(path, entries, pack_checksum)
        idx = load_pack_index(path)
        self.assertSucceeds(idx.check)
        self.assertEqual(idx.version, self._expected_version)
        return idx

    def writeIndex(self, filename, entries, pack_checksum):
        # FIXME: Write to BytesIO instead rather than hitting disk ?
        with GitFile(filename, "wb") as f:
            self._write_fn(f, entries, pack_checksum)
+
+
class TestMemoryIndexWriting(TestCase, BaseTestPackIndexWriting):
    """Runs the shared index tests against MemoryPackIndex."""

    def setUp(self):
        TestCase.setUp(self)
        self._has_crc32_checksum = True
        self._supports_large = True

    def index(self, filename, entries, pack_checksum):
        # filename is ignored: the index lives entirely in memory.
        return MemoryPackIndex(entries, pack_checksum)

    def tearDown(self):
        TestCase.tearDown(self)
+
+
class TestPackIndexWritingv1(TestCase, BaseTestFilePackIndexWriting):
    """Runs the shared index tests against the v1 on-disk format."""

    def setUp(self):
        TestCase.setUp(self)
        BaseTestFilePackIndexWriting.setUp(self)
        # v1 indexes store no per-entry CRC32 and no 64-bit offsets.
        self._has_crc32_checksum = False
        self._expected_version = 1
        self._supports_large = False
        self._write_fn = write_pack_index_v1

    def tearDown(self):
        TestCase.tearDown(self)
        BaseTestFilePackIndexWriting.tearDown(self)
+
+
class TestPackIndexWritingv2(TestCase, BaseTestFilePackIndexWriting):
    """Runs the shared index tests against the v2 on-disk format."""

    def setUp(self):
        TestCase.setUp(self)
        BaseTestFilePackIndexWriting.setUp(self)
        # v2 indexes carry CRC32s and support offsets beyond 2**32.
        self._has_crc32_checksum = True
        self._supports_large = True
        self._expected_version = 2
        self._write_fn = write_pack_index_v2

    def tearDown(self):
        TestCase.tearDown(self)
        BaseTestFilePackIndexWriting.tearDown(self)
+
+
class ReadZlibTests(TestCase):
    """Tests for read_zlib_chunks, which inflates pack object data."""

    # Plain-text payload, its zlib-compressed form, and trailing garbage
    # that read_zlib_chunks must hand back untouched.
    decomp = (
        b"tree 4ada885c9196b6b6fa08744b5862bf92896fc002\n"
        b"parent None\n"
        b"author Jelmer Vernooij <jelmer@samba.org> 1228980214 +0000\n"
        b"committer Jelmer Vernooij <jelmer@samba.org> 1228980214 +0000\n"
        b"\n"
        b"Provide replacement for mmap()'s offset argument."
    )
    comp = zlib.compress(decomp)
    extra = b"nextobject"

    def setUp(self):
        super().setUp()
        self.read = BytesIO(self.comp + self.extra).read
        self.unpacked = UnpackedObject(
            Tree.type_num, decomp_len=len(self.decomp), crc32=0
        )

    def test_decompress_size(self):
        # Any mismatch between decomp_len and the actual size must fail.
        good_decomp_len = len(self.decomp)
        self.unpacked.decomp_len = -1
        self.assertRaises(ValueError, read_zlib_chunks, self.read, self.unpacked)
        self.unpacked.decomp_len = good_decomp_len - 1
        self.assertRaises(zlib.error, read_zlib_chunks, self.read, self.unpacked)
        self.unpacked.decomp_len = good_decomp_len + 1
        self.assertRaises(zlib.error, read_zlib_chunks, self.read, self.unpacked)

    def test_decompress_truncated(self):
        read = BytesIO(self.comp[:10]).read
        self.assertRaises(zlib.error, read_zlib_chunks, read, self.unpacked)

        read = BytesIO(self.comp).read
        self.assertRaises(zlib.error, read_zlib_chunks, read, self.unpacked)

    def test_decompress_empty(self):
        unpacked = UnpackedObject(Tree.type_num, decomp_len=0)
        comp = zlib.compress(b"")
        read = BytesIO(comp + self.extra).read
        unused = read_zlib_chunks(read, unpacked)
        self.assertEqual(b"", b"".join(unpacked.decomp_chunks))
        # Some unused data must have been read past the stream end.
        self.assertNotEqual(b"", unused)
        self.assertEqual(self.extra, unused + read())

    def test_decompress_no_crc32(self):
        self.unpacked.crc32 = None
        read_zlib_chunks(self.read, self.unpacked)
        self.assertEqual(None, self.unpacked.crc32)

    def _do_decompress_test(self, buffer_size, **kwargs):
        # Helper: decompress with the given buffer size and verify payload,
        # CRC, and that trailing bytes are preserved.
        unused = read_zlib_chunks(
            self.read, self.unpacked, buffer_size=buffer_size, **kwargs
        )
        self.assertEqual(self.decomp, b"".join(self.unpacked.decomp_chunks))
        self.assertEqual(zlib.crc32(self.comp), self.unpacked.crc32)
        self.assertNotEqual(b"", unused)
        self.assertEqual(self.extra, unused + self.read())

    def test_simple_decompress(self):
        self._do_decompress_test(4096)
        self.assertEqual(None, self.unpacked.comp_chunks)

    # These buffer sizes are not intended to be realistic, but rather simulate
    # larger buffer sizes that may end at various places.
    def test_decompress_buffer_size_1(self):
        self._do_decompress_test(1)

    def test_decompress_buffer_size_2(self):
        self._do_decompress_test(2)

    def test_decompress_buffer_size_3(self):
        self._do_decompress_test(3)

    def test_decompress_buffer_size_4(self):
        self._do_decompress_test(4)

    def test_decompress_include_comp(self):
        # include_comp=True additionally retains the compressed bytes.
        self._do_decompress_test(4096, include_comp=True)
        self.assertEqual(self.comp, b"".join(self.unpacked.comp_chunks))
+
+
class DeltifyTests(TestCase):
    """Tests for deltify_pack_objects."""

    def test_empty(self):
        self.assertEqual([], list(deltify_pack_objects([])))

    def test_single(self):
        # A lone object cannot be deltified: delta_base stays None.
        b = Blob.from_string(b"foo")
        self.assertEqual(
            [
                UnpackedObject(
                    b.type_num,
                    sha=b.sha().digest(),
                    delta_base=None,
                    decomp_chunks=b.as_raw_chunks(),
                )
            ],
            list(deltify_pack_objects([(b, b"")])),
        )

    def test_simple_delta(self):
        # Two similar blobs: the second is stored as a delta against the first.
        b1 = Blob.from_string(b"a" * 101)
        b2 = Blob.from_string(b"a" * 100)
        delta = list(create_delta(b1.as_raw_chunks(), b2.as_raw_chunks()))
        self.assertEqual(
            [
                UnpackedObject(
                    b1.type_num,
                    sha=b1.sha().digest(),
                    delta_base=None,
                    decomp_chunks=b1.as_raw_chunks(),
                ),
                UnpackedObject(
                    b2.type_num,
                    sha=b2.sha().digest(),
                    delta_base=b1.sha().digest(),
                    decomp_chunks=delta,
                ),
            ],
            list(deltify_pack_objects([(b1, b""), (b2, b"")])),
        )
+
+
class TestPackStreamReader(TestCase):
    """Tests for PackStreamReader.read_objects."""

    # NOTE(review): "emtpy" is a typo for "empty"; kept as-is because this
    # chunk is part of a recorded patch. Note test_read_objects_empty below
    # covers the truly-empty-stream case.
    def test_read_objects_emtpy(self):
        f = BytesIO()
        build_pack(f, [])
        reader = PackStreamReader(f.read)
        self.assertEqual(0, len(list(reader.read_objects())))

    def test_read_objects(self):
        f = BytesIO()
        entries = build_pack(
            f,
            [
                (Blob.type_num, b"blob"),
                (OFS_DELTA, (0, b"blob1")),
            ],
        )
        reader = PackStreamReader(f.read)
        objects = list(reader.read_objects(compute_crc32=True))
        self.assertEqual(2, len(objects))

        unpacked_blob, unpacked_delta = objects

        # Full object: concrete type, no delta base.
        self.assertEqual(entries[0][0], unpacked_blob.offset)
        self.assertEqual(Blob.type_num, unpacked_blob.pack_type_num)
        self.assertEqual(Blob.type_num, unpacked_blob.obj_type_num)
        self.assertEqual(None, unpacked_blob.delta_base)
        self.assertEqual(b"blob", b"".join(unpacked_blob.decomp_chunks))
        self.assertEqual(entries[0][4], unpacked_blob.crc32)

        # OFS_DELTA: base is a backwards offset; object type still unknown.
        self.assertEqual(entries[1][0], unpacked_delta.offset)
        self.assertEqual(OFS_DELTA, unpacked_delta.pack_type_num)
        self.assertEqual(None, unpacked_delta.obj_type_num)
        self.assertEqual(
            unpacked_delta.offset - unpacked_blob.offset,
            unpacked_delta.delta_base,
        )
        delta = create_delta(b"blob", b"blob1")
        self.assertEqual(b"".join(delta), b"".join(unpacked_delta.decomp_chunks))
        self.assertEqual(entries[1][4], unpacked_delta.crc32)

    def test_read_objects_buffered(self):
        # A tiny zlib buffer exercises chunked inflation.
        f = BytesIO()
        build_pack(
            f,
            [
                (Blob.type_num, b"blob"),
                (OFS_DELTA, (0, b"blob1")),
            ],
        )
        reader = PackStreamReader(f.read, zlib_bufsize=4)
        self.assertEqual(2, len(list(reader.read_objects())))

    def test_read_objects_empty(self):
        # A completely empty stream (no header at all) fails the assertion.
        reader = PackStreamReader(BytesIO().read)
        self.assertRaises(AssertionError, list, reader.read_objects())
+
+
class TestPackIterator(DeltaChainIterator):
    """DeltaChainIterator subclass that records results and forbids
    inflating the same offset twice."""

    _compute_crc32 = True

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        # Offsets already resolved; used to detect duplicate work.
        self._unpacked_offsets: Set[int] = set()

    def _result(self, unpacked):
        """Return entries in the same format as build_pack."""
        return (
            unpacked.offset,
            unpacked.obj_type_num,
            b"".join(unpacked.obj_chunks),
            unpacked.sha(),
            unpacked.crc32,
        )

    def _resolve_object(self, offset, pack_type_num, base_chunks):
        # Fail loudly if the iterator ever re-inflates an offset.
        assert offset not in self._unpacked_offsets, (
            "Attempted to re-inflate offset %i" % offset
        )
        self._unpacked_offsets.add(offset)
        return super()._resolve_object(offset, pack_type_num, base_chunks)
+
+
class DeltaChainIteratorTests(TestCase):
    """Tests for DeltaChainIterator over packs with various delta shapes.

    Expected orderings reflect depth-first (DFS) delta resolution.
    """

    def setUp(self):
        super().setUp()
        self.store = MemoryObjectStore()
        self.fetched = set()

    def store_blobs(self, blobs_data):
        # Helper: add one blob per data string to the store.
        blobs = []
        for data in blobs_data:
            blob = make_object(Blob, data=data)
            blobs.append(blob)
            self.store.add_object(blob)
        return blobs

    def get_raw_no_repeat(self, bin_sha):
        """Wrapper around store.get_raw that doesn't allow repeat lookups."""
        hex_sha = sha_to_hex(bin_sha)
        self.assertNotIn(
            hex_sha, self.fetched, "Attempted to re-fetch object %s" % hex_sha
        )
        self.fetched.add(hex_sha)
        return self.store.get_raw(hex_sha)

    def make_pack_iter(self, f, thin=None):
        # thin defaults to "store is non-empty".
        if thin is None:
            thin = bool(list(self.store))
        resolve_ext_ref = thin and self.get_raw_no_repeat or None
        data = PackData("test.pack", file=f)
        return TestPackIterator.for_pack_data(data, resolve_ext_ref=resolve_ext_ref)

    def make_pack_iter_subset(self, f, subset, thin=None):
        # Like make_pack_iter, but only walks the requested subset of shas.
        if thin is None:
            thin = bool(list(self.store))
        resolve_ext_ref = thin and self.get_raw_no_repeat or None
        data = PackData("test.pack", file=f)
        assert data
        index = MemoryPackIndex.for_pack(data)
        pack = Pack.from_objects(data, index)
        return TestPackIterator.for_pack_subset(
            pack, subset, resolve_ext_ref=resolve_ext_ref
        )

    def assertEntriesMatch(self, expected_indexes, entries, pack_iter):
        # expected_indexes gives the expected order, as indexes into entries.
        expected = [entries[i] for i in expected_indexes]
        self.assertEqual(expected, list(pack_iter._walk_all_chains()))

    def test_no_deltas(self):
        f = BytesIO()
        entries = build_pack(
            f,
            [
                (Commit.type_num, b"commit"),
                (Blob.type_num, b"blob"),
                (Tree.type_num, b"tree"),
            ],
        )
        self.assertEntriesMatch([0, 1, 2], entries, self.make_pack_iter(f))
        f.seek(0)
        self.assertEntriesMatch([], entries, self.make_pack_iter_subset(f, []))
        f.seek(0)
        self.assertEntriesMatch(
            [1, 0],
            entries,
            self.make_pack_iter_subset(f, [entries[0][3], entries[1][3]]),
        )
        f.seek(0)
        self.assertEntriesMatch(
            [1, 0],
            entries,
            self.make_pack_iter_subset(
                f, [sha_to_hex(entries[0][3]), sha_to_hex(entries[1][3])]
            ),
        )

    def test_ofs_deltas(self):
        f = BytesIO()
        entries = build_pack(
            f,
            [
                (Blob.type_num, b"blob"),
                (OFS_DELTA, (0, b"blob1")),
                (OFS_DELTA, (0, b"blob2")),
            ],
        )
        # Delta resolution changed to DFS
        self.assertEntriesMatch([0, 2, 1], entries, self.make_pack_iter(f))
        f.seek(0)
        self.assertEntriesMatch(
            [0, 2, 1],
            entries,
            self.make_pack_iter_subset(f, [entries[1][3], entries[2][3]]),
        )

    def test_ofs_deltas_chain(self):
        f = BytesIO()
        entries = build_pack(
            f,
            [
                (Blob.type_num, b"blob"),
                (OFS_DELTA, (0, b"blob1")),
                (OFS_DELTA, (1, b"blob2")),
            ],
        )
        self.assertEntriesMatch([0, 1, 2], entries, self.make_pack_iter(f))

    def test_ref_deltas(self):
        f = BytesIO()
        entries = build_pack(
            f,
            [
                (REF_DELTA, (1, b"blob1")),
                (Blob.type_num, (b"blob")),
                (REF_DELTA, (1, b"blob2")),
            ],
        )
        # Delta resolution changed to DFS
        self.assertEntriesMatch([1, 2, 0], entries, self.make_pack_iter(f))

    def test_ref_deltas_chain(self):
        f = BytesIO()
        entries = build_pack(
            f,
            [
                (REF_DELTA, (2, b"blob1")),
                (Blob.type_num, (b"blob")),
                (REF_DELTA, (1, b"blob2")),
            ],
        )
        self.assertEntriesMatch([1, 2, 0], entries, self.make_pack_iter(f))

    def test_ofs_and_ref_deltas(self):
        # Deltas pending on this offset are popped before deltas depending on
        # this ref.
        f = BytesIO()
        entries = build_pack(
            f,
            [
                (REF_DELTA, (1, b"blob1")),
                (Blob.type_num, (b"blob")),
                (OFS_DELTA, (1, b"blob2")),
            ],
        )

        # Delta resolution changed to DFS
        self.assertEntriesMatch([1, 0, 2], entries, self.make_pack_iter(f))

    def test_mixed_chain(self):
        f = BytesIO()
        entries = build_pack(
            f,
            [
                (Blob.type_num, b"blob"),
                (REF_DELTA, (2, b"blob2")),
                (OFS_DELTA, (0, b"blob1")),
                (OFS_DELTA, (1, b"blob3")),
                (OFS_DELTA, (0, b"bob")),
            ],
        )
        # Delta resolution changed to DFS
        self.assertEntriesMatch([0, 4, 2, 1, 3], entries, self.make_pack_iter(f))

    def test_long_chain(self):
        # A 100-deep linear delta chain resolves in order.
        n = 100
        objects_spec = [(Blob.type_num, b"blob")]
        for i in range(n):
            objects_spec.append((OFS_DELTA, (i, b"blob" + str(i).encode("ascii"))))
        f = BytesIO()
        entries = build_pack(f, objects_spec)
        self.assertEntriesMatch(range(n + 1), entries, self.make_pack_iter(f))

    def test_branchy_chain(self):
        # 100 deltas all based on the same root object.
        n = 100
        objects_spec = [(Blob.type_num, b"blob")]
        for i in range(n):
            objects_spec.append((OFS_DELTA, (0, b"blob" + str(i).encode("ascii"))))
        f = BytesIO()
        entries = build_pack(f, objects_spec)
        # Delta resolution changed to DFS
        indices = [0, *list(range(100, 0, -1))]
        self.assertEntriesMatch(indices, entries, self.make_pack_iter(f))

    def test_ext_ref(self):
        (blob,) = self.store_blobs([b"blob"])
        f = BytesIO()
        entries = build_pack(f, [(REF_DELTA, (blob.id, b"blob1"))], store=self.store)
        pack_iter = self.make_pack_iter(f)
        self.assertEntriesMatch([0], entries, pack_iter)
        self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())

    def test_ext_ref_chain(self):
        (blob,) = self.store_blobs([b"blob"])
        f = BytesIO()
        entries = build_pack(
            f,
            [
                (REF_DELTA, (1, b"blob2")),
                (REF_DELTA, (blob.id, b"blob1")),
            ],
            store=self.store,
        )
        pack_iter = self.make_pack_iter(f)
        self.assertEntriesMatch([1, 0], entries, pack_iter)
        self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())

    def test_ext_ref_chain_degenerate(self):
        # Test a degenerate case where the sender is sending a REF_DELTA
        # object that expands to an object already in the repository.
        (blob,) = self.store_blobs([b"blob"])
        (blob2,) = self.store_blobs([b"blob2"])
        assert blob.id < blob2.id

        f = BytesIO()
        entries = build_pack(
            f,
            [
                (REF_DELTA, (blob.id, b"blob2")),
                (REF_DELTA, (0, b"blob3")),
            ],
            store=self.store,
        )
        pack_iter = self.make_pack_iter(f)
        self.assertEntriesMatch([0, 1], entries, pack_iter)
        self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())

    def test_ext_ref_multiple_times(self):
        # The same external base may back multiple deltas but is fetched once.
        (blob,) = self.store_blobs([b"blob"])
        f = BytesIO()
        entries = build_pack(
            f,
            [
                (REF_DELTA, (blob.id, b"blob1")),
                (REF_DELTA, (blob.id, b"blob2")),
            ],
            store=self.store,
        )
        pack_iter = self.make_pack_iter(f)
        self.assertEntriesMatch([0, 1], entries, pack_iter)
        self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())

    def test_multiple_ext_refs(self):
        b1, b2 = self.store_blobs([b"foo", b"bar"])
        f = BytesIO()
        entries = build_pack(
            f,
            [
                (REF_DELTA, (b1.id, b"foo1")),
                (REF_DELTA, (b2.id, b"bar2")),
            ],
            store=self.store,
        )
        pack_iter = self.make_pack_iter(f)
        self.assertEntriesMatch([0, 1], entries, pack_iter)
        self.assertEqual([hex_to_sha(b1.id), hex_to_sha(b2.id)], pack_iter.ext_refs())

    def test_bad_ext_ref_non_thin_pack(self):
        # External refs are not allowed when the pack is not thin.
        (blob,) = self.store_blobs([b"blob"])
        f = BytesIO()
        build_pack(f, [(REF_DELTA, (blob.id, b"blob1"))], store=self.store)
        pack_iter = self.make_pack_iter(f, thin=False)
        try:
            list(pack_iter._walk_all_chains())
            self.fail()
        except UnresolvedDeltas as e:
            self.assertEqual([blob.id], e.shas)

    def test_bad_ext_ref_thin_pack(self):
        # Missing external bases are reported via UnresolvedDeltas.shas.
        b1, b2, b3 = self.store_blobs([b"foo", b"bar", b"baz"])
        f = BytesIO()
        build_pack(
            f,
            [
                (REF_DELTA, (1, b"foo99")),
                (REF_DELTA, (b1.id, b"foo1")),
                (REF_DELTA, (b2.id, b"bar2")),
                (REF_DELTA, (b3.id, b"baz3")),
            ],
            store=self.store,
        )
        del self.store[b2.id]
        del self.store[b3.id]
        pack_iter = self.make_pack_iter(f)
        try:
            list(pack_iter._walk_all_chains())
            self.fail()
        except UnresolvedDeltas as e:
            self.assertEqual((sorted([b2.id, b3.id]),), (sorted(e.shas),))
+
+
class DeltaEncodeSizeTests(TestCase):
    """_delta_encode_size emits git's little-endian variable-length sizes."""

    def test_basic(self):
        # (input size, expected encoding) pairs, checked in the same order
        # as the original assertions.
        expectations = [
            (0, b"\x00"),
            (1, b"\x01"),
            (250, b"\xfa\x01"),
            (1000, b"\xe8\x07"),
            (100000, b"\xa0\x8d\x06"),
        ]
        for size, encoded in expectations:
            self.assertEqual(encoded, _delta_encode_size(size))
+
+
class EncodeCopyOperationTests(TestCase):
    """_encode_copy_operation packs (offset, length) copy instructions."""

    def test_basic(self):
        # (offset, length, expected bytes), checked in the original order.
        for offset, length, expected in [
            (0, 0, b"\x80"),
            (1, 10, b"\x91\x01\x0a"),
            (100, 1000, b"\xb1\x64\xe8\x03"),
            (1000, 1, b"\x93\xe8\x03\x01"),
        ]:
            self.assertEqual(expected, _encode_copy_operation(offset, length))
blob - /dev/null
blob + 35e90d95d3cbba9850794f15969ed8cb4815231f (mode 644)
--- /dev/null
+++ tests/test_patch.py
+# test_patch.py -- tests for patch.py
+# Copyright (C) 2010 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for patch.py."""
+
+from io import BytesIO, StringIO
+
+from dulwich.object_store import MemoryObjectStore
+from dulwich.objects import S_IFGITLINK, Blob, Commit, Tree
+from dulwich.patch import (
+ get_summary,
+ git_am_patch_split,
+ write_blob_diff,
+ write_commit_patch,
+ write_object_diff,
+ write_tree_diff,
+)
+
+from . import SkipTest, TestCase
+
+
class WriteCommitPatchTests(TestCase):
    """Tests for write_commit_patch (git-format-patch style output)."""

    def test_simple_bytesio(self):
        f = BytesIO()
        c = Commit()
        c.committer = c.author = b"Jelmer <jelmer@samba.org>"
        c.commit_time = c.author_time = 1271350201
        c.commit_timezone = c.author_timezone = 0
        c.message = b"This is the first line\nAnd this is the second line.\n"
        c.tree = Tree().id
        write_commit_patch(f, c, b"CONTENTS", (1, 1), version="custom")
        f.seek(0)
        lines = f.readlines()
        self.assertTrue(
            lines[0].startswith(b"From 0b0d34d1b5b596c928adc9a727a4b9e03d025298")
        )
        self.assertEqual(lines[1], b"From: Jelmer <jelmer@samba.org>\n")
        self.assertTrue(lines[2].startswith(b"Date: "))
        self.assertEqual(
            [
                b"Subject: [PATCH 1/1] This is the first line\n",
                b"And this is the second line.\n",
                b"\n",
                b"\n",
                b"---\n",
            ],
            lines[3:8],
        )
        self.assertEqual([b"CONTENTS-- \n", b"custom\n"], lines[-2:])
        if len(lines) >= 12:
            # diffstat may not be present
            self.assertEqual(lines[8], b" 0 files changed\n")
+
+
+class ReadGitAmPatch(TestCase):
def test_extract_string(self):
    """git_am_patch_split parses a text stream given an explicit encoding."""
    text = b"""\
From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001
From: Jelmer Vernooij <jelmer@samba.org>
Date: Thu, 15 Apr 2010 15:40:28 +0200
Subject: [PATCH 1/2] Remove executable bit from prey.ico (triggers a warning).

---
 pixmaps/prey.ico | Bin 9662 -> 9662 bytes
 1 files changed, 0 insertions(+), 0 deletions(-)
 mode change 100755 => 100644 pixmaps/prey.ico

--
1.7.0.4
"""
    c, diff, version = git_am_patch_split(StringIO(text.decode("utf-8")), "utf-8")
    self.assertEqual(b"Jelmer Vernooij <jelmer@samba.org>", c.committer)
    self.assertEqual(b"Jelmer Vernooij <jelmer@samba.org>", c.author)
    self.assertEqual(
        b"Remove executable bit from prey.ico " b"(triggers a warning).\n",
        c.message,
    )
    self.assertEqual(
        b""" pixmaps/prey.ico | Bin 9662 -> 9662 bytes
 1 files changed, 0 insertions(+), 0 deletions(-)
 mode change 100755 => 100644 pixmaps/prey.ico

""",
        diff,
    )
    self.assertEqual(b"1.7.0.4", version)
+
def test_extract_bytes(self):
    """git_am_patch_split parses a raw bytes stream with no encoding given."""
    text = b"""\
From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001
From: Jelmer Vernooij <jelmer@samba.org>
Date: Thu, 15 Apr 2010 15:40:28 +0200
Subject: [PATCH 1/2] Remove executable bit from prey.ico (triggers a warning).

---
 pixmaps/prey.ico | Bin 9662 -> 9662 bytes
 1 files changed, 0 insertions(+), 0 deletions(-)
 mode change 100755 => 100644 pixmaps/prey.ico

--
1.7.0.4
"""
    c, diff, version = git_am_patch_split(BytesIO(text))
    self.assertEqual(b"Jelmer Vernooij <jelmer@samba.org>", c.committer)
    self.assertEqual(b"Jelmer Vernooij <jelmer@samba.org>", c.author)
    self.assertEqual(
        b"Remove executable bit from prey.ico " b"(triggers a warning).\n",
        c.message,
    )
    self.assertEqual(
        b""" pixmaps/prey.ico | Bin 9662 -> 9662 bytes
 1 files changed, 0 insertions(+), 0 deletions(-)
 mode change 100755 => 100644 pixmaps/prey.ico

""",
        diff,
    )
    self.assertEqual(b"1.7.0.4", version)
+
+ def test_extract_spaces(self):
+ text = b"""From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001
+From: Jelmer Vernooij <jelmer@samba.org>
+Date: Thu, 15 Apr 2010 15:40:28 +0200
+Subject: [Dulwich-users] [PATCH] Added unit tests for
+ dulwich.object_store.tree_lookup_path.
+
+* dulwich/tests/test_object_store.py
+ (TreeLookupPathTests): This test case contains a few tests that ensure the
+ tree_lookup_path function works as expected.
+---
+ pixmaps/prey.ico | Bin 9662 -> 9662 bytes
+ 1 files changed, 0 insertions(+), 0 deletions(-)
+ mode change 100755 => 100644 pixmaps/prey.ico
+
+--
+1.7.0.4
+"""
+ c, diff, version = git_am_patch_split(BytesIO(text), "utf-8")
+ self.assertEqual(
+ b"""\
+Added unit tests for dulwich.object_store.tree_lookup_path.
+
+* dulwich/tests/test_object_store.py
+ (TreeLookupPathTests): This test case contains a few tests that ensure the
+ tree_lookup_path function works as expected.
+""",
+ c.message,
+ )
+
+ def test_extract_pseudo_from_header(self):
+ text = b"""From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001
+From: Jelmer Vernooij <jelmer@samba.org>
+Date: Thu, 15 Apr 2010 15:40:28 +0200
+Subject: [Dulwich-users] [PATCH] Added unit tests for
+ dulwich.object_store.tree_lookup_path.
+
+From: Jelmer Vernooij <jelmer@debian.org>
+
+* dulwich/tests/test_object_store.py
+ (TreeLookupPathTests): This test case contains a few tests that ensure the
+ tree_lookup_path function works as expected.
+---
+ pixmaps/prey.ico | Bin 9662 -> 9662 bytes
+ 1 files changed, 0 insertions(+), 0 deletions(-)
+ mode change 100755 => 100644 pixmaps/prey.ico
+
+--
+1.7.0.4
+"""
+ c, diff, version = git_am_patch_split(BytesIO(text), "utf-8")
+ self.assertEqual(b"Jelmer Vernooij <jelmer@debian.org>", c.author)
+ self.assertEqual(
+ b"""\
+Added unit tests for dulwich.object_store.tree_lookup_path.
+
+* dulwich/tests/test_object_store.py
+ (TreeLookupPathTests): This test case contains a few tests that ensure the
+ tree_lookup_path function works as expected.
+""",
+ c.message,
+ )
+
+ def test_extract_no_version_tail(self):
+ text = b"""\
+From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001
+From: Jelmer Vernooij <jelmer@samba.org>
+Date: Thu, 15 Apr 2010 15:40:28 +0200
+Subject: [Dulwich-users] [PATCH] Added unit tests for
+ dulwich.object_store.tree_lookup_path.
+
+From: Jelmer Vernooij <jelmer@debian.org>
+
+---
+ pixmaps/prey.ico | Bin 9662 -> 9662 bytes
+ 1 files changed, 0 insertions(+), 0 deletions(-)
+ mode change 100755 => 100644 pixmaps/prey.ico
+
+"""
+ c, diff, version = git_am_patch_split(BytesIO(text), "utf-8")
+ self.assertEqual(None, version)
+
+ def test_extract_mercurial(self):
+ raise SkipTest(
+ "git_am_patch_split doesn't handle Mercurial patches " "properly yet"
+ )
+ expected_diff = """\
+diff --git a/dulwich/tests/test_patch.py b/dulwich/tests/test_patch.py
+--- a/dulwich/tests/test_patch.py
++++ b/dulwich/tests/test_patch.py
+@@ -158,7 +158,7 @@
+
+ '''
+ c, diff, version = git_am_patch_split(BytesIO(text))
+- self.assertIs(None, version)
++ self.assertEqual(None, version)
+
+
+ class DiffTests(TestCase):
+"""
+ text = (
+ """\
+From dulwich-users-bounces+jelmer=samba.org@lists.launchpad.net \
+Mon Nov 29 00:58:18 2010
+Date: Sun, 28 Nov 2010 17:57:27 -0600
+From: Augie Fackler <durin42@gmail.com>
+To: dulwich-users <dulwich-users@lists.launchpad.net>
+Subject: [Dulwich-users] [PATCH] test_patch: fix tests on Python 2.6
+Content-Transfer-Encoding: 8bit
+
+Change-Id: I5e51313d4ae3a65c3f00c665002a7489121bb0d6
+
+%s
+
+_______________________________________________
+Mailing list: https://launchpad.net/~dulwich-users
+Post to : dulwich-users@lists.launchpad.net
+Unsubscribe : https://launchpad.net/~dulwich-users
+More help : https://help.launchpad.net/ListHelp
+
+"""
+ % expected_diff
+ )
+ c, diff, version = git_am_patch_split(BytesIO(text))
+ self.assertEqual(expected_diff, diff)
+ self.assertEqual(None, version)
+
+
+class DiffTests(TestCase):
+    """Tests for write_blob_diff and write_tree_diff."""
+
+    def test_blob_diff(self):
+        # Content change between two blobs with different paths.
+        f = BytesIO()
+        write_blob_diff(
+            f,
+            (b"foo.txt", 0o644, Blob.from_string(b"old\nsame\n")),
+            (b"bar.txt", 0o644, Blob.from_string(b"new\nsame\n")),
+        )
+        self.assertEqual(
+            [
+                b"diff --git a/foo.txt b/bar.txt",
+                b"index 3b0f961..a116b51 644",
+                b"--- a/foo.txt",
+                b"+++ b/bar.txt",
+                b"@@ -1,2 +1,2 @@",
+                b"-old",
+                b"+new",
+                b" same",
+            ],
+            f.getvalue().splitlines(),
+        )
+
+    def test_blob_add(self):
+        # (None, None, None) on the old side produces a "new file" diff
+        # against /dev/null.
+        f = BytesIO()
+        write_blob_diff(
+            f,
+            (None, None, None),
+            (b"bar.txt", 0o644, Blob.from_string(b"new\nsame\n")),
+        )
+        self.assertEqual(
+            [
+                b"diff --git a/bar.txt b/bar.txt",
+                b"new file mode 644",
+                b"index 0000000..a116b51",
+                b"--- /dev/null",
+                b"+++ b/bar.txt",
+                b"@@ -0,0 +1,2 @@",
+                b"+new",
+                b"+same",
+            ],
+            f.getvalue().splitlines(),
+        )
+
+    def test_blob_remove(self):
+        # (None, None, None) on the new side produces a "deleted file" diff.
+        f = BytesIO()
+        write_blob_diff(
+            f,
+            (b"bar.txt", 0o644, Blob.from_string(b"new\nsame\n")),
+            (None, None, None),
+        )
+        self.assertEqual(
+            [
+                b"diff --git a/bar.txt b/bar.txt",
+                b"deleted file mode 644",
+                b"index a116b51..0000000",
+                b"--- a/bar.txt",
+                b"+++ /dev/null",
+                b"@@ -1,2 +0,0 @@",
+                b"-new",
+                b"-same",
+            ],
+            f.getvalue().splitlines(),
+        )
+
+    def test_tree_diff(self):
+        # Exercises add, change and remove between two trees; entries are
+        # emitted in path order (added, changed, removed).
+        f = BytesIO()
+        store = MemoryObjectStore()
+        added = Blob.from_string(b"add\n")
+        removed = Blob.from_string(b"removed\n")
+        changed1 = Blob.from_string(b"unchanged\nremoved\n")
+        changed2 = Blob.from_string(b"unchanged\nadded\n")
+        unchanged = Blob.from_string(b"unchanged\n")
+        tree1 = Tree()
+        tree1.add(b"removed.txt", 0o644, removed.id)
+        tree1.add(b"changed.txt", 0o644, changed1.id)
+        # unchanged.txt points at the same blob (changed1) in both trees,
+        # so it must produce no diff output.
+        tree1.add(b"unchanged.txt", 0o644, changed1.id)
+        tree2 = Tree()
+        tree2.add(b"added.txt", 0o644, added.id)
+        tree2.add(b"changed.txt", 0o644, changed2.id)
+        tree2.add(b"unchanged.txt", 0o644, changed1.id)
+        store.add_objects(
+            [
+                (o, None)
+                for o in [
+                    tree1,
+                    tree2,
+                    added,
+                    removed,
+                    changed1,
+                    changed2,
+                    unchanged,
+                ]
+            ]
+        )
+        write_tree_diff(f, store, tree1.id, tree2.id)
+        self.assertEqual(
+            [
+                b"diff --git a/added.txt b/added.txt",
+                b"new file mode 644",
+                b"index 0000000..76d4bb8",
+                b"--- /dev/null",
+                b"+++ b/added.txt",
+                b"@@ -0,0 +1 @@",
+                b"+add",
+                b"diff --git a/changed.txt b/changed.txt",
+                b"index bf84e48..1be2436 644",
+                b"--- a/changed.txt",
+                b"+++ b/changed.txt",
+                b"@@ -1,2 +1,2 @@",
+                b" unchanged",
+                b"-removed",
+                b"+added",
+                b"diff --git a/removed.txt b/removed.txt",
+                b"deleted file mode 644",
+                b"index 2c3f0b3..0000000",
+                b"--- a/removed.txt",
+                b"+++ /dev/null",
+                b"@@ -1 +0,0 @@",
+                b"-removed",
+            ],
+            f.getvalue().splitlines(),
+        )
+
+    def test_tree_diff_submodule(self):
+        # Submodule (gitlink) entries diff as "Subproject commit <sha>" lines.
+        f = BytesIO()
+        store = MemoryObjectStore()
+        tree1 = Tree()
+        tree1.add(
+            b"asubmodule",
+            S_IFGITLINK,
+            b"06d0bdd9e2e20377b3180e4986b14c8549b393e4",
+        )
+        tree2 = Tree()
+        tree2.add(
+            b"asubmodule",
+            S_IFGITLINK,
+            b"cc975646af69f279396d4d5e1379ac6af80ee637",
+        )
+        store.add_objects([(o, None) for o in [tree1, tree2]])
+        write_tree_diff(f, store, tree1.id, tree2.id)
+        self.assertEqual(
+            [
+                b"diff --git a/asubmodule b/asubmodule",
+                b"index 06d0bdd..cc97564 160000",
+                b"--- a/asubmodule",
+                b"+++ b/asubmodule",
+                b"@@ -1 +1 @@",
+                b"-Subproject commit 06d0bdd9e2e20377b3180e4986b14c8549b393e4",
+                b"+Subproject commit cc975646af69f279396d4d5e1379ac6af80ee637",
+            ],
+            f.getvalue().splitlines(),
+        )
+
+    def test_object_diff_blob(self):
+        # Same as test_blob_diff but blobs are looked up by id in the store.
+        f = BytesIO()
+        b1 = Blob.from_string(b"old\nsame\n")
+        b2 = Blob.from_string(b"new\nsame\n")
+        store = MemoryObjectStore()
+        store.add_objects([(b1, None), (b2, None)])
+        write_object_diff(
+            f, store, (b"foo.txt", 0o644, b1.id), (b"bar.txt", 0o644, b2.id)
+        )
+        self.assertEqual(
+            [
+                b"diff --git a/foo.txt b/bar.txt",
+                b"index 3b0f961..a116b51 644",
+                b"--- a/foo.txt",
+                b"+++ b/bar.txt",
+                b"@@ -1,2 +1,2 @@",
+                b"-old",
+                b"+new",
+                b" same",
+            ],
+            f.getvalue().splitlines(),
+        )
+
+    def test_object_diff_add_blob(self):
+        f = BytesIO()
+        store = MemoryObjectStore()
+        b2 = Blob.from_string(b"new\nsame\n")
+        store.add_object(b2)
+        write_object_diff(f, store, (None, None, None), (b"bar.txt", 0o644, b2.id))
+        self.assertEqual(
+            [
+                b"diff --git a/bar.txt b/bar.txt",
+                b"new file mode 644",
+                b"index 0000000..a116b51",
+                b"--- /dev/null",
+                b"+++ b/bar.txt",
+                b"@@ -0,0 +1,2 @@",
+                b"+new",
+                b"+same",
+            ],
+            f.getvalue().splitlines(),
+        )
+
+    def test_object_diff_remove_blob(self):
+        f = BytesIO()
+        b1 = Blob.from_string(b"new\nsame\n")
+        store = MemoryObjectStore()
+        store.add_object(b1)
+        write_object_diff(f, store, (b"bar.txt", 0o644, b1.id), (None, None, None))
+        self.assertEqual(
+            [
+                b"diff --git a/bar.txt b/bar.txt",
+                b"deleted file mode 644",
+                b"index a116b51..0000000",
+                b"--- a/bar.txt",
+                b"+++ /dev/null",
+                b"@@ -1,2 +0,0 @@",
+                b"-new",
+                b"-same",
+            ],
+            f.getvalue().splitlines(),
+        )
+
+    def test_object_diff_bin_blob_force(self):
+        # diff_binary=True forces a textual hunk even for binary content.
+        f = BytesIO()
+        # Prepare two slightly different PNG headers
+        b1 = Blob.from_string(
+            b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a"
+            b"\x00\x00\x00\x0d\x49\x48\x44\x52"
+            b"\x00\x00\x01\xd5\x00\x00\x00\x9f"
+            b"\x08\x04\x00\x00\x00\x05\x04\x8b"
+        )
+        b2 = Blob.from_string(
+            b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a"
+            b"\x00\x00\x00\x0d\x49\x48\x44\x52"
+            b"\x00\x00\x01\xd5\x00\x00\x00\x9f"
+            b"\x08\x03\x00\x00\x00\x98\xd3\xb3"
+        )
+        store = MemoryObjectStore()
+        store.add_objects([(b1, None), (b2, None)])
+        write_object_diff(
+            f,
+            store,
+            (b"foo.png", 0o644, b1.id),
+            (b"bar.png", 0o644, b2.id),
+            diff_binary=True,
+        )
+        self.assertEqual(
+            [
+                b"diff --git a/foo.png b/bar.png",
+                b"index f73e47d..06364b7 644",
+                b"--- a/foo.png",
+                b"+++ b/bar.png",
+                b"@@ -1,4 +1,4 @@",
+                b" \x89PNG",
+                b" \x1a",
+                b" \x00\x00\x00",
+                b"-IHDR\x00\x00\x01\xd5\x00\x00\x00"
+                b"\x9f\x08\x04\x00\x00\x00\x05\x04\x8b",
+                b"\\ No newline at end of file",
+                b"+IHDR\x00\x00\x01\xd5\x00\x00\x00\x9f"
+                b"\x08\x03\x00\x00\x00\x98\xd3\xb3",
+                b"\\ No newline at end of file",
+            ],
+            f.getvalue().splitlines(),
+        )
+
+    def test_object_diff_bin_blob(self):
+        # Without diff_binary, binary content collapses to a one-line notice.
+        f = BytesIO()
+        # Prepare two slightly different PNG headers
+        b1 = Blob.from_string(
+            b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a"
+            b"\x00\x00\x00\x0d\x49\x48\x44\x52"
+            b"\x00\x00\x01\xd5\x00\x00\x00\x9f"
+            b"\x08\x04\x00\x00\x00\x05\x04\x8b"
+        )
+        b2 = Blob.from_string(
+            b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a"
+            b"\x00\x00\x00\x0d\x49\x48\x44\x52"
+            b"\x00\x00\x01\xd5\x00\x00\x00\x9f"
+            b"\x08\x03\x00\x00\x00\x98\xd3\xb3"
+        )
+        store = MemoryObjectStore()
+        store.add_objects([(b1, None), (b2, None)])
+        write_object_diff(
+            f, store, (b"foo.png", 0o644, b1.id), (b"bar.png", 0o644, b2.id)
+        )
+        self.assertEqual(
+            [
+                b"diff --git a/foo.png b/bar.png",
+                b"index f73e47d..06364b7 644",
+                b"Binary files a/foo.png and b/bar.png differ",
+            ],
+            f.getvalue().splitlines(),
+        )
+
+    def test_object_diff_add_bin_blob(self):
+        f = BytesIO()
+        b2 = Blob.from_string(
+            b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a"
+            b"\x00\x00\x00\x0d\x49\x48\x44\x52"
+            b"\x00\x00\x01\xd5\x00\x00\x00\x9f"
+            b"\x08\x03\x00\x00\x00\x98\xd3\xb3"
+        )
+        store = MemoryObjectStore()
+        store.add_object(b2)
+        write_object_diff(f, store, (None, None, None), (b"bar.png", 0o644, b2.id))
+        self.assertEqual(
+            [
+                b"diff --git a/bar.png b/bar.png",
+                b"new file mode 644",
+                b"index 0000000..06364b7",
+                b"Binary files /dev/null and b/bar.png differ",
+            ],
+            f.getvalue().splitlines(),
+        )
+
+    def test_object_diff_remove_bin_blob(self):
+        f = BytesIO()
+        b1 = Blob.from_string(
+            b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a"
+            b"\x00\x00\x00\x0d\x49\x48\x44\x52"
+            b"\x00\x00\x01\xd5\x00\x00\x00\x9f"
+            b"\x08\x04\x00\x00\x00\x05\x04\x8b"
+        )
+        store = MemoryObjectStore()
+        store.add_object(b1)
+        write_object_diff(f, store, (b"foo.png", 0o644, b1.id), (None, None, None))
+        self.assertEqual(
+            [
+                b"diff --git a/foo.png b/foo.png",
+                b"deleted file mode 644",
+                b"index f73e47d..0000000",
+                b"Binary files a/foo.png and /dev/null differ",
+            ],
+            f.getvalue().splitlines(),
+        )
+
+    def test_object_diff_kind_change(self):
+        # A blob replaced by a submodule entry yields old/new mode lines plus
+        # the content change to a "Subproject commit" line.
+        f = BytesIO()
+        b1 = Blob.from_string(b"new\nsame\n")
+        store = MemoryObjectStore()
+        store.add_object(b1)
+        write_object_diff(
+            f,
+            store,
+            (b"bar.txt", 0o644, b1.id),
+            (
+                b"bar.txt",
+                0o160000,
+                b"06d0bdd9e2e20377b3180e4986b14c8549b393e4",
+            ),
+        )
+        self.assertEqual(
+            [
+                b"diff --git a/bar.txt b/bar.txt",
+                b"old file mode 644",
+                b"new file mode 160000",
+                b"index a116b51..06d0bdd 160000",
+                b"--- a/bar.txt",
+                b"+++ b/bar.txt",
+                b"@@ -1,2 +1 @@",
+                b"-new",
+                b"-same",
+                b"+Subproject commit 06d0bdd9e2e20377b3180e4986b14c8549b393e4",
+            ],
+            f.getvalue().splitlines(),
+        )
+
+
+class GetSummaryTests(TestCase):
+    """Tests for get_summary, which derives a slug from a commit message."""
+
+    def test_simple(self):
+        c = Commit()
+        c.committer = c.author = b"Jelmer <jelmer@samba.org>"
+        c.commit_time = c.author_time = 1271350201
+        c.commit_timezone = c.author_timezone = 0
+        c.message = b"This is the first line\nAnd this is the second line.\n"
+        c.tree = Tree().id
+        # Only the first message line is used, with spaces mapped to "-".
+        self.assertEqual("This-is-the-first-line", get_summary(c))
blob - /dev/null
blob + 7a1c26f38ecff24e70b2549428e85f2e02d464da (mode 644)
--- /dev/null
+++ tests/test_porcelain.py
+# test_porcelain.py -- porcelain tests
+# Copyright (C) 2013 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for dulwich.porcelain."""
+
+import contextlib
+import os
+import platform
+import re
+import shutil
+import stat
+import subprocess
+import sys
+import tarfile
+import tempfile
+import threading
+import time
+from io import BytesIO, StringIO
+from unittest import skipIf
+
+from dulwich import porcelain
+from dulwich.diff_tree import tree_changes
+from dulwich.errors import CommitError
+from dulwich.objects import ZERO_SHA, Blob, Tag, Tree
+from dulwich.porcelain import CheckoutError
+from dulwich.repo import NoIndexPresent, Repo
+from dulwich.server import DictBackend
+from dulwich.web import make_server, make_wsgi_chain
+
+from . import TestCase
+from .utils import build_commit_graph, make_commit, make_object
+
+try:
+ import gpg
+except ImportError:
+ gpg = None
+
+
+def flat_walk_dir(dir_to_walk):
+    """Yield every entry under ``dir_to_walk`` relative to it.
+
+    Subdirectories themselves are yielded (as relative paths) in addition to
+    files; entries directly in the root are yielded as bare filenames.
+    Traversal follows os.walk order.
+    """
+    for dirpath, _, filenames in os.walk(dir_to_walk):
+        rel_dirpath = os.path.relpath(dirpath, dir_to_walk)
+        if not dirpath == dir_to_walk:
+            yield rel_dirpath
+        for filename in filenames:
+            if dirpath == dir_to_walk:
+                yield filename
+            else:
+                yield os.path.join(rel_dirpath, filename)
+
+
+class PorcelainTestCase(TestCase):
+    """Base test case that provides a fresh temporary repository.
+
+    Creates ``self.repo`` (a Repo at ``self.repo_path``) inside a temp dir,
+    both cleaned up automatically after each test.
+    """
+
+    def setUp(self):
+        super().setUp()
+        self.test_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, self.test_dir)
+        self.repo_path = os.path.join(self.test_dir, "repo")
+        self.repo = Repo.init(self.repo_path, mkdir=True)
+        self.addCleanup(self.repo.close)
+
+    def assertRecentTimestamp(self, ts):
+        # On some slow CIs it does actually take more than 5 seconds to go from
+        # creating the tag to here.
+        self.assertLess(time.time() - ts, 50)
+
+
+@skipIf(gpg is None, "gpg is not available")
+class PorcelainGpgTestCase(PorcelainTestCase):
+    """Porcelain test case with an isolated GNUPGHOME and two test PGP keys."""
+
+    # ASCII-armored private key used as the default signing key in tests.
+    # Test-only material; do not reuse outside the test suite.
+    DEFAULT_KEY = """
+-----BEGIN PGP PRIVATE KEY BLOCK-----
+
+lQVYBGBjIyIBDADAwydvMPQqeEiK54FG1DHwT5sQejAaJOb+PsOhVa4fLcKsrO3F
+g5CxO+/9BHCXAr8xQAtp/gOhDN05fyK3MFyGlL9s+Cd8xf34S3R4rN/qbF0oZmaa
+FW0MuGnniq54HINs8KshadVn1Dhi/GYSJ588qNFRl/qxFTYAk+zaGsgX/QgFfy0f
+djWXJLypZXu9D6DlyJ0cPSzUlfBkI2Ytx6grzIquRjY0FbkjK3l+iGsQ+ebRMdcP
+Sqd5iTN9XuzIUVoBFAZBRjibKV3N2wxlnCbfLlzCyDp7rktzSThzjJ2pVDuLrMAx
+6/L9hIhwmFwdtY4FBFGvMR0b0Ugh3kCsRWr8sgj9I7dUoLHid6ObYhJFhnD3GzRc
+U+xX1uy3iTCqJDsG334aQIhC5Giuxln4SUZna2MNbq65ksh38N1aM/t3+Dc/TKVB
+rb5KWicRPCQ4DIQkHMDCSPyj+dvRLCPzIaPvHD7IrCfHYHOWuvvPGCpwjo0As3iP
+IecoMeguPLVaqgcAEQEAAQAL/i5/pQaUd4G7LDydpbixPS6r9UrfPrU/y5zvBP/p
+DCynPDutJ1oq539pZvXQ2VwEJJy7x0UVKkjyMndJLNWly9wHC7o8jkHx/NalVP47
+LXR+GWbCdOOcYYbdAWcCNB3zOtzPnWhdAEagkc2G9xRQDIB0dLHLCIUpCbLP/CWM
+qlHnDsVMrVTWjgzcpsnyGgw8NeLYJtYGB8dsN+XgCCjo7a9LEvUBKNgdmWBbf14/
+iBw7PCugazFcH9QYfZwzhsi3nqRRagTXHbxFRG0LD9Ro9qCEutHYGP2PJ59Nj8+M
+zaVkJj/OxWxVOGvn2q16mQBCjKpbWfqXZVVl+G5DGOmiSTZqXy+3j6JCKdOMy6Qd
+JBHOHhFZXYmWYaaPzoc33T/C3QhMfY5sOtUDLJmV05Wi4dyBeNBEslYgUuTk/jXb
+5ZAie25eDdrsoqkcnSs2ZguMF7AXhe6il2zVhUUMs/6UZgd6I7I4Is0HXT/pnxEp
+uiTRFu4v8E+u+5a8O3pffe5boQYA3TsIxceen20qY+kRaTOkURHMZLn/y6KLW8bZ
+rNJyXWS9hBAcbbSGhfOwYfzbDCM17yPQO3E2zo8lcGdRklUdIIaCxQwtu36N5dfx
+OLCCQc5LmYdl/EAm91iAhrr7dNntZ18MU09gdzUu+ONZwu4CP3cJT83+qYZULso8
+4Fvd/X8IEfGZ7kM+ylrdqBwtlrn8yYXtom+ows2M2UuNR53B+BUOd73kVLTkTCjE
+JH63+nE8BqG7tDLCMws+23SAA3xxBgDfDrr0x7zCozQKVQEqBzQr9Uoo/c/ZjAfi
+syzNSrDz+g5gqJYtuL9XpPJVWf6V1GXVyJlSbxR9CjTkBxmlPxpvV25IsbVSsh0o
+aqkf2eWpbCL6Qb2E0jd1rvf8sGeTTohzYfiSVVsC2t9ngRO/CmetizwQBvRzLGMZ
+4mtAPiy7ZEDc2dFrPp7zlKISYmJZUx/DJVuZWuOrVMpBP+bSgJXoMTlICxZUqUnE
+2VKVStb/L+Tl8XCwIWdrZb9BaDnHqfcGAM2B4HNPxP88Yj1tEDly/vqeb3vVMhj+
+S1lunnLdgxp46YyuTMYAzj88eCGurRtzBsdxxlGAsioEnZGebEqAHQbieKq/DO6I
+MOMZHMSVBDqyyIx3assGlxSX8BSFW0lhKyT7i0XqnAgCJ9f/5oq0SbFGq+01VQb7
+jIx9PbcYJORxsE0JG/CXXPv27bRtQXsudkWGSYvC0NLOgk4z8+kQpQtyFh16lujq
+WRwMeriu0qNDjCa1/eHIKDovhAZ3GyO5/9m1tBlUZXN0IFVzZXIgPHRlc3RAdGVz
+dC5jb20+iQHOBBMBCAA4AhsDBQsJCAcCBhUKCQgLAgQWAgMBAh4BAheAFiEEjrR8
+MQ4fJK44PYMvfN2AClLmXiYFAmDcEZEACgkQfN2AClLmXibZzgv/ZfeTpTuqQE1W
+C1jT5KpQExnt0BizTX0U7BvSn8Fr6VXTyol6kYc3u71GLUuJyawCLtIzOXqOXJvz
+bjcZqymcMADuftKcfMy513FhbF6MhdVd6QoeBP6+7/xXOFJCi+QVYF7SQ2h7K1Qm
++yXOiAMgSxhCZQGPBNJLlDUOd47nSIMANvlumFtmLY/1FD7RpG7WQWjeX1mnxNTw
+hUU+Yv7GuFc/JprXCIYqHbhWfvXyVtae2ZK4xuVi5eqwA2RfggOVM7drb+CgPhG0
++9aEDDLOZqVi65wK7J73Puo3rFTbPQMljxw5s27rWqF+vB6hhVdJOPNomWy3naPi
+k5MW0mhsacASz1WYndpZz+XaQTq/wJF5HUyyeUWJ0vlOEdwx021PHcqSTyfNnkjD
+KncrE21t2sxWRsgGDETxIwkd2b2HNGAvveUD0ffFK/oJHGSXjAERFGc3wuiDj3mQ
+BvKm4wt4QF9ZMrCdhMAA6ax5kfEUqQR4ntmrJk/khp/mV7TILaI4nQVYBGBjIyIB
+DADghIo9wXnRxzfdDTvwnP8dHpLAIaPokgdpyLswqUCixJWiW2xcV6weUjEWwH6n
+eN/t1uZYVehbrotxVPla+MPvzhxp6/cmG+2lhzEBOp6zRwnL1wIB6HoKJfpREhyM
+c8rLR0zMso1L1bJTyydvnu07a7BWo3VWKjilb0rEZZUSD/2hidx5HxMOJSoidLWe
+d/PPuv6yht3NtA4UThlcfldm9G6PbqCdm1kMEKAkq0wVJvhPJ6gEFRNJimgygfUw
+MDFXEIhQtxjgdV5Uoz3O5452VLoRsDlgpi3E0WDGj7WXDaO5uSU0T5aJgVgHCP/f
+xZhHuQFk2YYIl5nCBpOZyWWI0IKmscTuEwzpkhICQDQFvcMZ5ibsl7wA2P7YTrQf
+FDMjjzuaK80GYPfxDFlyKUyLqFt8w/QzsZLDLX7+jxIEpbRAaMw/JsWqm5BMxxbS
+3CIQiS5S3oSKDsNINelqWFfwvLhvlQra8gIxyNTlek25OdgG66BiiX+seH8A/ql+
+F+MAEQEAAQAL/1jrNSLjMt9pwo6qFKClVQZP2vf7+sH7v7LeHIDXr3EnYUnVYnOq
+B1FU5PspTp/+J9W25DB9CZLx7Gj8qeslFdiuLSOoIBB4RCToB3kAoeTH0DHqW/Gs
+hFTrmJkuDp9zpo/ek6SIXJx5rHAyR9KVw0fizQprH2f6PcgLbTWeM61dJuqowmg3
+7eCOyIKv7VQvFqEhYokLD+JNmrvg+Htg0DXGvdjRjAwPf/NezEXpj67a6cHTp1/C
+hwp7pevG+3fTxaCJFesl5/TxxtnaBLE8m2uo/S6Hxgn9l0edonroe1QlTjEqGLy2
+7qi2z5Rem+v6GWNDRgvAWur13v8FNdyduHlioG/NgRsU9mE2MYeFsfi3cfNpJQp/
+wC9PSCIXrb/45mkS8KyjZpCrIPB9RV/m0MREq01TPom7rstZc4A1pD0Ot7AtUYS3
+e95zLyEmeLziPJ9fV4fgPmEudDr1uItnmV0LOskKlpg5sc0hhdrwYoobfkKt2dx6
+DqfMlcM1ZkUbLQYA4jwfpFJG4HmYvjL2xCJxM0ycjvMbqFN+4UjgYWVlRfOrm1V4
+Op86FjbRbV6OOCNhznotAg7mul4xtzrrTkK8o3YLBeJseDgl4AWuzXtNa9hE0XpK
+9gJoEHUuBOOsamVh2HpXESFyE5CclOV7JSh541TlZKfnqfZYCg4JSbp0UijkawCL
+5bJJUiGGMD9rZUxIAKQO1DvUEzptS7Jl6S3y5sbIIhilp4KfYWbSk3PPu9CnZD5b
+LhEQp0elxnb/IL8PBgD+DpTeC8unkGKXUpbe9x0ISI6V1D6FmJq/FxNg7fMa3QCh
+fGiAyoTm80ZETynj+blRaDO3gY4lTLa3Opubof1EqK2QmwXmpyvXEZNYcQfQ2CCS
+GOWUCK8jEQamUPf1PWndZXJUmROI1WukhlL71V/ir6zQeVCv1wcwPwclJPnAe87u
+pEklnCYpvsEldwHUX9u0BWzoULIEsi+ddtHmT0KTeF/DHRy0W15jIHbjFqhqckj1
+/6fmr7l7kIi/kN4vWe0F/0Q8IXX+cVMgbl3aIuaGcvENLGcoAsAtPGx88SfRgmfu
+HK64Y7hx1m+Bo215rxJzZRjqHTBPp0BmCi+JKkaavIBrYRbsx20gveI4dzhLcUhB
+kiT4Q7oz0/VbGHS1CEf9KFeS/YOGj57s4yHauSVI0XdP9kBRTWmXvBkzsooB2cKH
+hwhUN7iiT1k717CiTNUT6Q/pcPFCyNuMoBBGQTU206JEgIjQvI3f8xMUMGmGVVQz
+9/k716ycnhb2JZ/Q/AyQIeHJiQG2BBgBCAAgAhsMFiEEjrR8MQ4fJK44PYMvfN2A
+ClLmXiYFAmDcEa4ACgkQfN2AClLmXiZxxQv/XaMN0hPCygtrQMbCsTNb34JbvJzh
+hngPuUAfTbRHrR3YeATyQofNbL0DD3fvfzeFF8qESqvzCSZxS6dYsXPd4MCJTzlp
+zYBZ2X0sOrgDqZvqCZKN72RKgdk0KvthdzAxsIm2dfcQOxxowXMxhJEXZmsFpusx
+jKJxOcrfVRjXJnh9isY0NpCoqMQ+3k3wDJ3VGEHV7G+A+vFkWfbLJF5huQ96uaH9
+Uc+jUsREUH9G82ZBqpoioEN8Ith4VXpYnKdTMonK/+ZcyeraJZhXrvbjnEomKdzU
+0pu4bt1HlLR3dcnpjN7b009MBf2xLgEfQk2nPZ4zzY+tDkxygtPllaB4dldFjBpT
+j7Q+t49sWMjmlJUbLlHfuJ7nUUK5+cGjBsWVObAEcyfemHWCTVFnEa2BJslGC08X
+rFcjRRcMEr9ct4551QFBHsv3O/Wp3/wqczYgE9itSnGT05w+4vLt4smG+dnEHjRJ
+brMb2upTHa+kjktjdO96/BgSnKYqmNmPB/qB
+=ivA/
+-----END PGP PRIVATE KEY BLOCK-----
+    """
+
+    # Fingerprint of DEFAULT_KEY.
+    DEFAULT_KEY_ID = "8EB47C310E1F24AE383D832F7CDD800A52E65E26"
+
+    # A second, distinct private key for alternate-key / negative tests.
+    NON_DEFAULT_KEY = """
+-----BEGIN PGP PRIVATE KEY BLOCK-----
+
+lQVYBGBjI0ABDADGWBRp+t02emfzUlhrc1psqIhhecFm6Em0Kv33cfDpnfoMF1tK
+Yy/4eLYIR7FmpdbFPcDThFNHbXJzBi00L1mp0XQE2l50h/2bDAAgREdZ+NVo5a7/
+RSZjauNU1PxW6pnXMehEh1tyIQmV78jAukaakwaicrpIenMiFUN3fAKHnLuFffA6
+t0f3LqJvTDhUw/o2vPgw5e6UDQhA1C+KTv1KXVrhJNo88a3hZqCZ76z3drKR411Q
+zYgT4DUb8lfnbN+z2wfqT9oM5cegh2k86/mxAA3BYOeQrhmQo/7uhezcgbxtdGZr
+YlbuaNDTSBrn10ZoaxLPo2dJe2zWxgD6MpvsGU1w3tcRW508qo/+xoWp2/pDzmok
++uhOh1NAj9zB05VWBz1r7oBgCOIKpkD/LD4VKq59etsZ/UnrYDwKdXWZp7uhshkU
+M7N35lUJcR76a852dlMdrgpmY18+BP7+o7M+5ElHTiqQbMuE1nHTg8RgVpdV+tUx
+dg6GWY/XHf5asm8AEQEAAQAL/A85epOp+GnymmEQfI3+5D178D//Lwu9n86vECB6
+xAHCqQtdjZnXpDp/1YUsL59P8nzgYRk7SoMskQDoQ/cB/XFuDOhEdMSgHaTVlnrj
+ktCCq6rqGnUosyolbb64vIfVaSqd/5SnCStpAsnaBoBYrAu4ZmV4xfjDQWwn0q5s
+u+r56mD0SkjPgbwk/b3qTVagVmf2OFzUgWwm1e/X+bA1oPag1NV8VS4hZPXswT4f
+qhiyqUFOgP6vUBcqehkjkIDIl/54xII7/P5tp3LIZawvIXqHKNTqYPCqaCqCj+SL
+vMYDIb6acjescfZoM71eAeHAANeFZzr/rwfBT+dEP6qKmPXNcvgE11X44ZCr04nT
+zOV/uDUifEvKT5qgtyJpSFEVr7EXubJPKoNNhoYqq9z1pYU7IedX5BloiVXKOKTY
+0pk7JkLqf3g5fYtXh/wol1owemITJy5V5PgaqZvk491LkI6S+kWC7ANYUg+TDPIW
+afxW3E5N1CYV6XDAl0ZihbLcoQYAy0Ky/p/wayWKePyuPBLwx9O89GSONK2pQljZ
+yaAgxPQ5/i1vx6LIMg7k/722bXR9W3zOjWOin4eatPM3d2hkG96HFvnBqXSmXOPV
+03Xqy1/B5Tj8E9naLKUHE/OBQEc363DgLLG9db5HfPlpAngeppYPdyWkhzXyzkgS
+PylaE5eW3zkdjEbYJ6RBTecTZEgBaMvJNPdWbn//frpP7kGvyiCg5Es+WjLInUZ6
+0sdifcNTCewzLXK80v/y5mVOdJhPBgD5zs9cYdyiQJayqAuOr+He1eMHMVUbm9as
+qBmPrst398eBW9ZYF7eBfTSlUf6B+WnvyLKEGsUf/7IK0EWDlzoBuWzWiHjUAY1g
+m9eTV2MnvCCCefqCErWwfFo2nWOasAZA9sKD+ICIBY4tbtvSl4yfLBzTMwSvs9ZS
+K1ocPSYUnhm2miSWZ8RLZPH7roHQasNHpyq/AX7DahFf2S/bJ+46ZGZ8Pigr7hA+
+MjmpQ4qVdb5SaViPmZhAKO+PjuCHm+EF/2H0Y3Sl4eXgxZWoQVOUeXdWg9eMfYrj
+XDtUMIFppV/QxbeztZKvJdfk64vt/crvLsOp0hOky9cKwY89r4QaHfexU3qR+qDq
+UlMvR1rHk7dS5HZAtw0xKsFJNkuDxvBkMqv8Los8zp3nUl+U99dfZOArzNkW38wx
+FPa0ixkC9za2BkDrWEA8vTnxw0A2upIFegDUhwOByrSyfPPnG3tKGeqt3Izb/kDk
+Q9vmo+HgxBOguMIvlzbBfQZwtbd/gXzlvPqCtCJBbm90aGVyIFRlc3QgVXNlciA8
+dGVzdDJAdGVzdC5jb20+iQHOBBMBCAA4AhsDBQsJCAcCBhUKCQgLAgQWAgMBAh4B
+AheAFiEEapM5P1DF5qzT1vtFuTYhLttOFMAFAmDcEeEACgkQuTYhLttOFMDe0Qv/
+Qx/bzXztJ3BCc+CYAVDx7Kr37S68etwwLgcWzhG+CDeMB5F/QE+upKgxy2iaqQFR
+mxfOMgf/TIQkUfkbaASzK1LpnesYO85pk7XYjoN1bYEHiXTkeW+bgB6aJIxrRmO2
+SrWasdBC/DsI3Mrya8YMt/TiHC6VpRJVxCe5vv7/kZC4CXrgTBnZocXx/YXimbke
+poPMVdbvhYh6N0aGeS38jRKgyN10KXmhDTAQDwseVFavBWAjVfx3DEwjtK2Z2GbA
+aL8JvAwRtqiPFkDMIKPL4UwxtXFws8SpMt6juroUkNyf6+BxNWYqmwXHPy8zCJAb
+xkxIJMlEc+s7qQsP3fILOo8Xn+dVzJ5sa5AoARoXm1GMjsdqaKAzq99Dic/dHnaQ
+Civev1PQsdwlYW2C2wNXNeIrxMndbDMFfNuZ6BnGHWJ/wjcp/pFs4YkyyZN8JH7L
+hP2FO4Jgham3AuP13kC3Ivea7V6hR8QNcDZRwFPOMIX4tXwQv1T72+7DZGaA25O7
+nQVXBGBjI0ABDADJMBYIcG0Yil9YxFs7aYzNbd7alUAr89VbY8eIGPHP3INFPM1w
+lBQCu+4j6xdEbhMpppLBZ9A5TEylP4C6qLtPa+oLtPeuSw8gHDE10XE4lbgPs376
+rL60XdImSOHhiduACUefYjqpcmFH9Bim1CC+koArYrSQJQx1Jri+OpnTaL/8UID0
+KzD/kEgMVGlHIVj9oJmb4+j9pW8I/g0wDSnIaEKFMxqu6SIVJ1GWj+MUMvZigjLC
+sNCZd7PnbOC5VeU3SsXj6he74Jx0AmGMPWIHi9M0DjHO5d1cCbXTnud8xxM1bOh4
+7aCTnMK5cVyIr+adihgJpVVhrndSM8aklBPRgtozrGNCgF2CkYU2P1blxfloNr/8
+UZpM83o+s1aObBszzRNLxnpNORqoLqjfPtLEPQnagxE+4EapCq0NZ/x6yO5VTwwp
+NljdFAEk40uGuKyn1QA3uNMHy5DlpLl+tU7t1KEovdZ+OVYsYKZhVzw0MTpKogk9
+JI7AN0q62ronPskAEQEAAQAL+O8BUSt1ZCVjPSIXIsrR+ZOSkszZwgJ1CWIoh0IH
+YD2vmcMHGIhFYgBdgerpvhptKhaw7GcXDScEnYkyh5s4GE2hxclik1tbj/x1gYCN
+8BNoyeDdPFxQG73qN12D99QYEctpOsz9xPLIDwmL0j1ehAfhwqHIAPm9Ca+i8JYM
+x/F+35S/jnKDXRI+NVlwbiEyXKXxxIqNlpy9i8sDBGexO5H5Sg0zSN/B1duLekGD
+biDw6gLc6bCgnS+0JOUpU07Z2fccMOY9ncjKGD2uIb/ePPUaek92GCQyq0eorCIV
+brcQsRc5sSsNtnRKQTQtxioROeDg7kf2oWySeHTswlXW/219ihrSXgteHJd+rPm7
+DYLEeGLRny8bRKv8rQdAtApHaJE4dAATXeY4RYo4NlXHYaztGYtU6kiM/3zCfWAe
+9Nn+Wh9jMTZrjefUCagS5r6ZqAh7veNo/vgIGaCLh0a1Ypa0Yk9KFrn3LYEM3zgk
+3m3bn+7qgy5cUYXoJ3DGJJEhBgDPonpW0WElqLs5ZMem1ha85SC38F0IkAaSuzuz
+v3eORiKWuyJGF32Q2XHa1RHQs1JtUKd8rxFer3b8Oq71zLz6JtVc9dmRudvgcJYX
+0PC11F6WGjZFSSp39dajFp0A5DKUs39F3w7J1yuDM56TDIN810ywufGAHARY1pZb
+UJAy/dTqjFnCbNjpAakor3hVzqxcmUG+7Y2X9c2AGncT1MqAQC3M8JZcuZvkK8A9
+cMk8B914ryYE7VsZMdMhyTwHmykGAPgNLLa3RDETeGeGCKWI+ZPOoU0ib5JtJZ1d
+P3tNwfZKuZBZXKW9gqYqyBa/qhMip84SP30pr/TvulcdAFC759HK8sQZyJ6Vw24P
+c+5ssRxrQUEw1rvJPWhmQCmCOZHBMQl5T6eaTOpR5u3aUKTMlxPKhK9eC1dCSTnI
+/nyL8An3VKnLy+K/LI42YGphBVLLJmBewuTVDIJviWRdntiG8dElyEJMOywUltk3
+2CEmqgsD9tPO8rXZjnMrMn3gfsiaoQYA6/6/e2utkHr7gAoWBgrBBdqVHsvqh5Ro
+2DjLAOpZItO/EdCJfDAmbTYOa04535sBDP2tcH/vipPOPpbr1Y9Y/mNsKCulNxed
+yqAmEkKOcerLUP5UHju0AB6VBjHJFdU2mqT+UjPyBk7WeKXgFomyoYMv3KpNOFWR
+xi0Xji4kKHbttA6Hy3UcGPr9acyUAlDYeKmxbSUYIPhw32bbGrX9+F5YriTufRsG
+3jftQVo9zqdcQSD/5pUTMn3EYbEcohYB2YWJAbYEGAEIACACGwwWIQRqkzk/UMXm
+rNPW+0W5NiEu204UwAUCYNwR6wAKCRC5NiEu204UwOPnC/92PgB1c3h9FBXH1maz
+g29fndHIHH65VLgqMiQ7HAMojwRlT5Xnj5tdkCBmszRkv5vMvdJRa3ZY8Ed/Inqr
+hxBFNzpjqX4oj/RYIQLKXWWfkTKYVLJFZFPCSo00jesw2gieu3Ke/Yy4gwhtNodA
+v+s6QNMvffTW/K3XNrWDB0E7/LXbdidzhm+MBu8ov2tuC3tp9liLICiE1jv/2xT4
+CNSO6yphmk1/1zEYHS/mN9qJ2csBmte2cdmGyOcuVEHk3pyINNMDOamaURBJGRwF
+XB5V7gTKUFU4jCp3chywKrBHJHxGGDUmPBmZtDtfWAOgL32drK7/KUyzZL/WO7Fj
+akOI0hRDFOcqTYWL20H7+hAiX3oHMP7eou3L5C7wJ9+JMcACklN/WMjG9a536DFJ
+4UgZ6HyKPP+wy837Hbe8b25kNMBwFgiaLR0lcgzxj7NyQWjVCMOEN+M55tRCjvL6
+ya6JVZCRbMXfdCy8lVPgtNQ6VlHaj8Wvnn2FLbWWO2n2r3s=
+=9zU5
+-----END PGP PRIVATE KEY BLOCK-----
+"""
+
+    # Fingerprint of NON_DEFAULT_KEY.
+    NON_DEFAULT_KEY_ID = "6A93393F50C5E6ACD3D6FB45B936212EDB4E14C0"
+
+    def setUp(self):
+        # Point GNUPGHOME at a private, per-test keyring directory so tests
+        # never touch (or depend on) the user's real keyring.
+        super().setUp()
+        self.gpg_dir = os.path.join(self.test_dir, "gpg")
+        os.mkdir(self.gpg_dir, mode=0o700)
+        # Ignore errors when deleting GNUPGHOME, because of race conditions
+        # (e.g. the gpg-agent socket having been deleted). See
+        # https://github.com/jelmer/dulwich/issues/1000
+        self.addCleanup(shutil.rmtree, self.gpg_dir, ignore_errors=True)
+        self.overrideEnv("GNUPGHOME", self.gpg_dir)
+
+    def import_default_key(self):
+        # Import DEFAULT_KEY into the isolated keyring via the gpg CLI.
+        subprocess.run(
+            ["gpg", "--import"],
+            stdout=subprocess.DEVNULL,
+            stderr=subprocess.DEVNULL,
+            input=PorcelainGpgTestCase.DEFAULT_KEY,
+            text=True,
+        )
+
+    def import_non_default_key(self):
+        # Import NON_DEFAULT_KEY into the isolated keyring via the gpg CLI.
+        subprocess.run(
+            ["gpg", "--import"],
+            stdout=subprocess.DEVNULL,
+            stderr=subprocess.DEVNULL,
+            input=PorcelainGpgTestCase.NON_DEFAULT_KEY,
+            text=True,
+        )
+
+
+class ArchiveTests(PorcelainTestCase):
+    """Tests for the archive command."""
+
+    def test_simple(self):
+        c1, c2, c3 = build_commit_graph(
+            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
+        )
+        self.repo.refs[b"refs/heads/master"] = c3.id
+        out = BytesIO()
+        err = BytesIO()
+        porcelain.archive(
+            self.repo.path, b"refs/heads/master", outstream=out, errstream=err
+        )
+        self.assertEqual(b"", err.getvalue())
+        tf = tarfile.TarFile(fileobj=out)
+        self.addCleanup(tf.close)
+        # The commits carry empty trees, so the archive contains no entries.
+        self.assertEqual([], tf.getnames())
+
+
+class UpdateServerInfoTests(PorcelainTestCase):
+    """Tests for porcelain.update_server_info."""
+
+    def test_simple(self):
+        c1, c2, c3 = build_commit_graph(
+            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
+        )
+        self.repo.refs[b"refs/heads/foo"] = c3.id
+        porcelain.update_server_info(self.repo.path)
+        # update_server_info writes the dumb-HTTP info/refs file.
+        self.assertTrue(
+            os.path.exists(os.path.join(self.repo.controldir(), "info", "refs"))
+        )
+
+
+class CommitTests(PorcelainTestCase):
+    """Tests for porcelain.commit."""
+
+    def test_custom_author(self):
+        # Author and committer can be supplied as bytes.
+        c1, c2, c3 = build_commit_graph(
+            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
+        )
+        self.repo.refs[b"refs/heads/foo"] = c3.id
+        sha = porcelain.commit(
+            self.repo.path,
+            message=b"Some message",
+            author=b"Joe <joe@example.com>",
+            committer=b"Bob <bob@example.com>",
+        )
+        # commit() returns the new commit's hex sha as bytes.
+        self.assertIsInstance(sha, bytes)
+        self.assertEqual(len(sha), 40)
+
+    def test_unicode(self):
+        # The same arguments may also be supplied as str.
+        c1, c2, c3 = build_commit_graph(
+            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
+        )
+        self.repo.refs[b"refs/heads/foo"] = c3.id
+        sha = porcelain.commit(
+            self.repo.path,
+            message="Some message",
+            author="Joe <joe@example.com>",
+            committer="Bob <bob@example.com>",
+        )
+        self.assertIsInstance(sha, bytes)
+        self.assertEqual(len(sha), 40)
+
+    def test_no_verify(self):
+        # no_verify=True must skip both the pre-commit and commit-msg hooks.
+        if os.name != "posix":
+            self.skipTest("shell hook tests requires POSIX shell")
+        self.assertTrue(os.path.exists("/bin/sh"))
+
+        hooks_dir = os.path.join(self.repo.controldir(), "hooks")
+        os.makedirs(hooks_dir, exist_ok=True)
+        self.addCleanup(shutil.rmtree, hooks_dir)
+
+        c1, c2, c3 = build_commit_graph(
+            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
+        )
+
+        hook_fail = "#!/bin/sh\nexit 1"
+
+        # hooks are executed in pre-commit, commit-msg order
+        # test commit-msg failure first, then pre-commit failure, then
+        # no_verify to skip both hooks
+        commit_msg = os.path.join(hooks_dir, "commit-msg")
+        with open(commit_msg, "w") as f:
+            f.write(hook_fail)
+        os.chmod(commit_msg, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+
+        with self.assertRaises(CommitError):
+            porcelain.commit(
+                self.repo.path,
+                message="Some message",
+                author="Joe <joe@example.com>",
+                committer="Bob <bob@example.com>",
+            )
+
+        pre_commit = os.path.join(hooks_dir, "pre-commit")
+        with open(pre_commit, "w") as f:
+            f.write(hook_fail)
+        os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+
+        with self.assertRaises(CommitError):
+            porcelain.commit(
+                self.repo.path,
+                message="Some message",
+                author="Joe <joe@example.com>",
+                committer="Bob <bob@example.com>",
+            )
+
+        sha = porcelain.commit(
+            self.repo.path,
+            message="Some message",
+            author="Joe <joe@example.com>",
+            committer="Bob <bob@example.com>",
+            no_verify=True,
+        )
+        self.assertIsInstance(sha, bytes)
+        self.assertEqual(len(sha), 40)
+
+    def test_timezone(self):
+        # Timezone precedence: explicit arguments, then the GIT_AUTHOR_DATE /
+        # GIT_COMMITTER_DATE environment variables, then the local timezone.
+        c1, c2, c3 = build_commit_graph(
+            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
+        )
+        self.repo.refs[b"refs/heads/foo"] = c3.id
+        sha = porcelain.commit(
+            self.repo.path,
+            message="Some message",
+            author="Joe <joe@example.com>",
+            author_timezone=18000,
+            committer="Bob <bob@example.com>",
+            commit_timezone=18000,
+        )
+        self.assertIsInstance(sha, bytes)
+        self.assertEqual(len(sha), 40)
+
+        commit = self.repo.get_object(sha)
+        self.assertEqual(commit._author_timezone, 18000)
+        self.assertEqual(commit._commit_timezone, 18000)
+
+        # "-0501" is 5 hours 1 minute west of UTC = -18060 seconds.
+        self.overrideEnv("GIT_AUTHOR_DATE", "1995-11-20T19:12:08-0501")
+        self.overrideEnv("GIT_COMMITTER_DATE", "1995-11-20T19:12:08-0501")
+
+        sha = porcelain.commit(
+            self.repo.path,
+            message="Some message",
+            author="Joe <joe@example.com>",
+            committer="Bob <bob@example.com>",
+        )
+        self.assertIsInstance(sha, bytes)
+        self.assertEqual(len(sha), 40)
+
+        commit = self.repo.get_object(sha)
+        self.assertEqual(commit._author_timezone, -18060)
+        self.assertEqual(commit._commit_timezone, -18060)
+
+        # With the env vars cleared, the local timezone is the fallback.
+        self.overrideEnv("GIT_AUTHOR_DATE", None)
+        self.overrideEnv("GIT_COMMITTER_DATE", None)
+
+        local_timezone = time.localtime().tm_gmtoff
+
+        sha = porcelain.commit(
+            self.repo.path,
+            message="Some message",
+            author="Joe <joe@example.com>",
+            committer="Bob <bob@example.com>",
+        )
+        self.assertIsInstance(sha, bytes)
+        self.assertEqual(len(sha), 40)
+
+        commit = self.repo.get_object(sha)
+        self.assertEqual(commit._author_timezone, local_timezone)
+        self.assertEqual(commit._commit_timezone, local_timezone)
+
+
+@skipIf(
+    platform.python_implementation() == "PyPy" or sys.platform == "win32",
+    "gpgme not easily available or supported on Windows and PyPy",
+)
+class CommitSignTests(PorcelainGpgTestCase):
+    """Tests for GPG-signed commits made via porcelain.commit."""
+
+    def test_default_key(self):
+        # signoff=True signs with the key configured as user.signingKey.
+        c1, c2, c3 = build_commit_graph(
+            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
+        )
+        self.repo.refs[b"HEAD"] = c3.id
+        cfg = self.repo.get_config()
+        cfg.set(("user",), "signingKey", PorcelainGpgTestCase.DEFAULT_KEY_ID)
+        self.import_default_key()
+
+        sha = porcelain.commit(
+            self.repo.path,
+            message="Some message",
+            author="Joe <joe@example.com>",
+            committer="Bob <bob@example.com>",
+            signoff=True,
+        )
+        self.assertIsInstance(sha, bytes)
+        self.assertEqual(len(sha), 40)
+
+        commit = self.repo.get_object(sha)
+        # GPG Signatures aren't deterministic, so we can't do a static assertion.
+        commit.verify()
+        commit.verify(keyids=[PorcelainGpgTestCase.DEFAULT_KEY_ID])
+
+        # Verification against a different key must fail ...
+        self.import_non_default_key()
+        self.assertRaises(
+            gpg.errors.MissingSignatures,
+            commit.verify,
+            keyids=[PorcelainGpgTestCase.NON_DEFAULT_KEY_ID],
+        )
+
+        # ... and tampering with the commit must invalidate the signature.
+        commit.committer = b"Alice <alice@example.com>"
+        self.assertRaises(
+            gpg.errors.BadSignatures,
+            commit.verify,
+        )
+
+    def test_non_default_key(self):
+        # Passing a key id as the signoff value signs with that specific key,
+        # overriding the configured user.signingKey.
+        c1, c2, c3 = build_commit_graph(
+            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
+        )
+        self.repo.refs[b"HEAD"] = c3.id
+        cfg = self.repo.get_config()
+        cfg.set(("user",), "signingKey", PorcelainGpgTestCase.DEFAULT_KEY_ID)
+        self.import_non_default_key()
+
+        sha = porcelain.commit(
+            self.repo.path,
+            message="Some message",
+            author="Joe <joe@example.com>",
+            committer="Bob <bob@example.com>",
+            signoff=PorcelainGpgTestCase.NON_DEFAULT_KEY_ID,
+        )
+        self.assertIsInstance(sha, bytes)
+        self.assertEqual(len(sha), 40)
+
+        commit = self.repo.get_object(sha)
+        # GPG Signatures aren't deterministic, so we can't do a static assertion.
+        commit.verify()
+
+
class TimezoneTests(PorcelainTestCase):
    """Tests for porcelain.get_user_timezones date/offset parsing."""

    def put_envs(self, value):
        """Set both git date environment variables to *value*."""
        self.overrideEnv("GIT_AUTHOR_DATE", value)
        self.overrideEnv("GIT_COMMITTER_DATE", value)

    def fallback(self, value):
        """Assert that *value* is rejected as unparseable."""
        self.put_envs(value)
        self.assertRaises(porcelain.TimezoneFormatError, porcelain.get_user_timezones)

    def test_internal_format(self):
        """Git's internal '<timestamp> <offset>' format."""
        self.put_envs("0 +0500")
        self.assertTupleEqual((18000, 18000), porcelain.get_user_timezones())

    def test_rfc_2822(self):
        """RFC 2822 dates, with and without an explicit offset."""
        self.put_envs("Mon, 20 Nov 1995 19:12:08 -0500")
        self.assertTupleEqual((-18000, -18000), porcelain.get_user_timezones())

        self.put_envs("Mon, 20 Nov 1995 19:12:08")
        self.assertTupleEqual((0, 0), porcelain.get_user_timezones())

    def test_iso8601(self):
        """ISO 8601 dates with several spellings of the UTC offset."""
        cases = [
            ("1995-11-20T19:12:08-0501", (-18060, -18060)),
            ("1995-11-20T19:12:08+0501", (18060, 18060)),
            ("1995-11-20T19:12:08-05:01", (-18060, -18060)),
            ("1995-11-20 19:12:08-05", (-18000, -18000)),
            # https://github.com/git/git/blob/96b2d4fa927c5055adc5b1d08f10a5d7352e2989/t/t6300-for-each-ref.sh#L128
            ("2006-07-03 17:18:44 +0200", (7200, 7200)),
        ]
        for date_str, expected in cases:
            self.put_envs(date_str)
            self.assertTupleEqual(expected, porcelain.get_user_timezones())

    def test_missing_or_malformed(self):
        # TODO: add more here
        for bad_value in (
            "0 + 0500",
            "a +0500",
            "1995-11-20T19:12:08",
            "1995-11-20T19:12:08-05:",
            "1995.11.20",
            "11/20/1995",
            "20.11.1995",
        ):
            self.fallback(bad_value)

    def test_different_envs(self):
        """Author and committer dates may carry different offsets."""
        self.overrideEnv("GIT_AUTHOR_DATE", "0 +0500")
        self.overrideEnv("GIT_COMMITTER_DATE", "0 +0501")
        self.assertTupleEqual((18000, 18060), porcelain.get_user_timezones())

    def test_no_envs(self):
        """An unset variable falls back to the local timezone offset."""
        local_timezone = time.localtime().tm_gmtoff

        self.put_envs("0 +0500")
        self.assertTupleEqual((18000, 18000), porcelain.get_user_timezones())

        self.overrideEnv("GIT_COMMITTER_DATE", None)
        self.assertTupleEqual((18000, local_timezone), porcelain.get_user_timezones())

        self.put_envs("0 +0500")
        self.overrideEnv("GIT_AUTHOR_DATE", None)
        self.assertTupleEqual((local_timezone, 18000), porcelain.get_user_timezones())

        self.put_envs("0 +0500")
        self.overrideEnv("GIT_AUTHOR_DATE", None)
        self.overrideEnv("GIT_COMMITTER_DATE", None)
        self.assertTupleEqual(
            (local_timezone, local_timezone), porcelain.get_user_timezones()
        )
+
+
class CleanTests(PorcelainTestCase):
    """Tests for porcelain.clean."""

    def put_files(self, tracked, ignored, untracked, empty_dirs):
        """Put the described files in the wd.

        Args:
          tracked: relative paths to create, stage and commit
          ignored: relative paths to create and list in .gitignore
          untracked: relative paths to create but leave untracked
          empty_dirs: relative names of empty directories to create
        """
        all_files = tracked | ignored | untracked
        for file_path in all_files:
            abs_path = os.path.join(self.repo.path, file_path)
            # File may need to be written in a dir that doesn't exist yet, so
            # create the parent dir(s) as necessary
            parent_dir = os.path.dirname(abs_path)
            try:
                os.makedirs(parent_dir)
            except FileExistsError:
                pass
            with open(abs_path, "w") as f:
                f.write("")

        with open(os.path.join(self.repo.path, ".gitignore"), "w") as f:
            f.writelines(ignored)

        # Fix: create each requested directory. Previously the loop ignored
        # dir_path and always created the literal name "empty_dir", which
        # only worked by accident for empty_dirs={"empty_dir"}.
        for dir_path in empty_dirs:
            os.mkdir(os.path.join(self.repo.path, dir_path))

        files_to_add = [os.path.join(self.repo.path, t) for t in tracked]
        porcelain.add(repo=self.repo.path, paths=files_to_add)
        porcelain.commit(repo=self.repo.path, message="init commit")

    def assert_wd(self, expected_paths):
        """Assert paths of files and dirs in wd are same as expected_paths."""
        control_dir_rel = os.path.relpath(self.repo._controldir, self.repo.path)

        # normalize paths to simplify comparison across platforms
        found_paths = {
            os.path.normpath(p)
            for p in flat_walk_dir(self.repo.path)
            if not p.split(os.sep)[0] == control_dir_rel
        }
        norm_expected_paths = {os.path.normpath(p) for p in expected_paths}
        self.assertEqual(found_paths, norm_expected_paths)

    def test_from_root(self):
        """Cleaning from the root removes untracked files and empty dirs."""
        self.put_files(
            tracked={"tracked_file", "tracked_dir/tracked_file", ".gitignore"},
            ignored={"ignored_file"},
            untracked={
                "untracked_file",
                "tracked_dir/untracked_dir/untracked_file",
                "untracked_dir/untracked_dir/untracked_file",
            },
            empty_dirs={"empty_dir"},
        )

        porcelain.clean(repo=self.repo.path, target_dir=self.repo.path)

        self.assert_wd(
            {
                "tracked_file",
                "tracked_dir/tracked_file",
                ".gitignore",
                "ignored_file",
                "tracked_dir",
            }
        )

    def test_from_subdir(self):
        """Cleaning a subdirectory leaves the rest of the tree untouched."""
        self.put_files(
            tracked={"tracked_file", "tracked_dir/tracked_file", ".gitignore"},
            ignored={"ignored_file"},
            untracked={
                "untracked_file",
                "tracked_dir/untracked_dir/untracked_file",
                "untracked_dir/untracked_dir/untracked_file",
            },
            empty_dirs={"empty_dir"},
        )

        porcelain.clean(
            repo=self.repo,
            target_dir=os.path.join(self.repo.path, "untracked_dir"),
        )

        self.assert_wd(
            {
                "tracked_file",
                "tracked_dir/tracked_file",
                ".gitignore",
                "ignored_file",
                "untracked_file",
                "tracked_dir/untracked_dir/untracked_file",
                "empty_dir",
                "untracked_dir",
                "tracked_dir",
                "tracked_dir/untracked_dir",
            }
        )
+
+
class CloneTests(PorcelainTestCase):
    """Tests for porcelain.clone against local source repositories."""

    def test_simple_local(self):
        """Clone without checkout: refs are copied, the index stays empty and
        no working-tree files appear; origin remote config is written."""
        f1_1 = make_object(Blob, data=b"f1")
        commit_spec = [[1], [2, 1], [3, 1, 2]]
        trees = {
            1: [(b"f1", f1_1), (b"f2", f1_1)],
            2: [(b"f1", f1_1), (b"f2", f1_1)],
            3: [(b"f1", f1_1), (b"f2", f1_1)],
        }

        c1, c2, c3 = build_commit_graph(self.repo.object_store, commit_spec, trees)
        self.repo.refs[b"refs/heads/master"] = c3.id
        self.repo.refs[b"refs/tags/foo"] = c3.id
        target_path = tempfile.mkdtemp()
        errstream = BytesIO()
        self.addCleanup(shutil.rmtree, target_path)
        r = porcelain.clone(
            self.repo.path, target_path, checkout=False, errstream=errstream
        )
        self.addCleanup(r.close)
        self.assertEqual(r.path, target_path)
        target_repo = Repo(target_path)
        self.assertEqual(0, len(target_repo.open_index()))
        self.assertEqual(c3.id, target_repo.refs[b"refs/tags/foo"])
        self.assertNotIn(b"f1", os.listdir(target_path))
        self.assertNotIn(b"f2", os.listdir(target_path))
        c = r.get_config()
        encoded_path = self.repo.path
        if not isinstance(encoded_path, bytes):
            encoded_path = encoded_path.encode("utf-8")
        # The origin remote records the source URL and the default fetchspec.
        self.assertEqual(encoded_path, c.get((b"remote", b"origin"), b"url"))
        self.assertEqual(
            b"+refs/heads/*:refs/remotes/origin/*",
            c.get((b"remote", b"origin"), b"fetch"),
        )

    def test_simple_local_with_checkout(self):
        """Clone with checkout=True materialises the files on disk."""
        f1_1 = make_object(Blob, data=b"f1")
        commit_spec = [[1], [2, 1], [3, 1, 2]]
        trees = {
            1: [(b"f1", f1_1), (b"f2", f1_1)],
            2: [(b"f1", f1_1), (b"f2", f1_1)],
            3: [(b"f1", f1_1), (b"f2", f1_1)],
        }

        c1, c2, c3 = build_commit_graph(self.repo.object_store, commit_spec, trees)
        self.repo.refs[b"refs/heads/master"] = c3.id
        target_path = tempfile.mkdtemp()
        errstream = BytesIO()
        self.addCleanup(shutil.rmtree, target_path)
        with porcelain.clone(
            self.repo.path, target_path, checkout=True, errstream=errstream
        ) as r:
            self.assertEqual(r.path, target_path)
        with Repo(target_path) as r:
            self.assertEqual(r.head(), c3.id)
        self.assertIn("f1", os.listdir(target_path))
        self.assertIn("f2", os.listdir(target_path))

    def test_bare_local_with_checkout(self):
        """A bare clone has refs but no index and no working-tree files."""
        f1_1 = make_object(Blob, data=b"f1")
        commit_spec = [[1], [2, 1], [3, 1, 2]]
        trees = {
            1: [(b"f1", f1_1), (b"f2", f1_1)],
            2: [(b"f1", f1_1), (b"f2", f1_1)],
            3: [(b"f1", f1_1), (b"f2", f1_1)],
        }

        c1, c2, c3 = build_commit_graph(self.repo.object_store, commit_spec, trees)
        self.repo.refs[b"refs/heads/master"] = c3.id
        target_path = tempfile.mkdtemp()
        errstream = BytesIO()
        self.addCleanup(shutil.rmtree, target_path)
        with porcelain.clone(
            self.repo.path, target_path, bare=True, errstream=errstream
        ) as r:
            self.assertEqual(r.path, target_path)
        with Repo(target_path) as r:
            r.head()
            self.assertRaises(NoIndexPresent, r.open_index)
        self.assertNotIn(b"f1", os.listdir(target_path))
        self.assertNotIn(b"f2", os.listdir(target_path))

    def test_no_checkout_with_bare(self):
        """Requesting both checkout=True and bare=True is an error."""
        f1_1 = make_object(Blob, data=b"f1")
        commit_spec = [[1]]
        trees = {1: [(b"f1", f1_1), (b"f2", f1_1)]}

        (c1,) = build_commit_graph(self.repo.object_store, commit_spec, trees)
        self.repo.refs[b"refs/heads/master"] = c1.id
        self.repo.refs[b"HEAD"] = c1.id
        target_path = tempfile.mkdtemp()
        errstream = BytesIO()
        self.addCleanup(shutil.rmtree, target_path)
        self.assertRaises(
            porcelain.Error,
            porcelain.clone,
            self.repo.path,
            target_path,
            checkout=True,
            bare=True,
            errstream=errstream,
        )

    def test_no_head_no_checkout(self):
        """Cloning a source without HEAD succeeds even with checkout=True."""
        f1_1 = make_object(Blob, data=b"f1")
        commit_spec = [[1]]
        trees = {1: [(b"f1", f1_1), (b"f2", f1_1)]}

        (c1,) = build_commit_graph(self.repo.object_store, commit_spec, trees)
        self.repo.refs[b"refs/heads/master"] = c1.id
        target_path = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, target_path)
        errstream = BytesIO()
        r = porcelain.clone(
            self.repo.path, target_path, checkout=True, errstream=errstream
        )
        r.close()

    def test_no_head_no_checkout_outstream_errstream_autofallback(self):
        """Same as above but with the NoneStream discard stream."""
        f1_1 = make_object(Blob, data=b"f1")
        commit_spec = [[1]]
        trees = {1: [(b"f1", f1_1), (b"f2", f1_1)]}

        (c1,) = build_commit_graph(self.repo.object_store, commit_spec, trees)
        self.repo.refs[b"refs/heads/master"] = c1.id
        target_path = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, target_path)
        errstream = porcelain.NoneStream()
        r = porcelain.clone(
            self.repo.path, target_path, checkout=True, errstream=errstream
        )
        r.close()

    def test_source_broken(self):
        """A failed clone must not leave a partial target directory behind."""
        with tempfile.TemporaryDirectory() as parent:
            target_path = os.path.join(parent, "target")
            self.assertRaises(
                Exception, porcelain.clone, "/nonexistent/repo", target_path
            )
            self.assertFalse(os.path.exists(target_path))

    def test_fetch_symref(self):
        """A symbolic HEAD in the source is mirrored in the clone."""
        f1_1 = make_object(Blob, data=b"f1")
        trees = {1: [(b"f1", f1_1), (b"f2", f1_1)]}
        [c1] = build_commit_graph(self.repo.object_store, [[1]], trees)
        self.repo.refs.set_symbolic_ref(b"HEAD", b"refs/heads/else")
        self.repo.refs[b"refs/heads/else"] = c1.id
        target_path = tempfile.mkdtemp()
        errstream = BytesIO()
        self.addCleanup(shutil.rmtree, target_path)
        r = porcelain.clone(
            self.repo.path, target_path, checkout=False, errstream=errstream
        )
        self.addCleanup(r.close)
        self.assertEqual(r.path, target_path)
        target_repo = Repo(target_path)
        self.assertEqual(0, len(target_repo.open_index()))
        self.assertEqual(c1.id, target_repo.refs[b"refs/heads/else"])
        self.assertEqual(c1.id, target_repo.refs[b"HEAD"])
        self.assertEqual(
            {
                b"HEAD": b"refs/heads/else",
                b"refs/remotes/origin/HEAD": b"refs/remotes/origin/else",
            },
            target_repo.refs.get_symrefs(),
        )

    def test_detached_head(self):
        """Cloning a source with a detached HEAD keeps the clone detached."""
        f1_1 = make_object(Blob, data=b"f1")
        commit_spec = [[1], [2, 1], [3, 1, 2]]
        trees = {
            1: [(b"f1", f1_1), (b"f2", f1_1)],
            2: [(b"f1", f1_1), (b"f2", f1_1)],
            3: [(b"f1", f1_1), (b"f2", f1_1)],
        }

        c1, c2, c3 = build_commit_graph(self.repo.object_store, commit_spec, trees)
        self.repo.refs[b"refs/heads/master"] = c2.id
        self.repo.refs.remove_if_equals(b"HEAD", None)
        self.repo.refs[b"HEAD"] = c3.id
        target_path = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, target_path)
        errstream = porcelain.NoneStream()
        with porcelain.clone(
            self.repo.path, target_path, checkout=True, errstream=errstream
        ) as r:
            self.assertEqual(c3.id, r.refs[b"HEAD"])
+
+
class InitTests(TestCase):
    """Smoke tests for porcelain.init."""

    def test_non_bare(self):
        """init() creates a non-bare repository in a fresh directory."""
        target = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, target)
        porcelain.init(target)

    def test_bare(self):
        """init(bare=True) creates a bare repository."""
        target = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, target)
        porcelain.init(target, bare=True)
+
+
class AddTests(PorcelainTestCase):
    """Tests for porcelain.add."""

    def test_add_default_paths(self):
        """With no paths, add() stages everything under the current dir."""
        # create a file for initial commit
        fullpath = os.path.join(self.repo.path, "blah")
        with open(fullpath, "w") as f:
            f.write("\n")
        porcelain.add(repo=self.repo.path, paths=[fullpath])
        porcelain.commit(
            repo=self.repo.path,
            message=b"test",
            author=b"test <email>",
            committer=b"test <email>",
        )

        # Add a second test file and a file in a directory
        with open(os.path.join(self.repo.path, "foo"), "w") as f:
            f.write("\n")
        os.mkdir(os.path.join(self.repo.path, "adir"))
        with open(os.path.join(self.repo.path, "adir", "afile"), "w") as f:
            f.write("\n")
        cwd = os.getcwd()
        try:
            os.chdir(self.repo.path)
            self.assertEqual({"foo", "blah", "adir", ".git"}, set(os.listdir(".")))
            # add() returns (added_paths, ignored_paths); "blah" is already
            # tracked so only the new files are reported.
            self.assertEqual(
                (["foo", os.path.join("adir", "afile")], set()),
                porcelain.add(self.repo.path),
            )
        finally:
            os.chdir(cwd)

        # Check that foo was added and nothing in .git was modified
        index = self.repo.open_index()
        self.assertEqual(sorted(index), [b"adir/afile", b"blah", b"foo"])

    def test_add_default_paths_subdir(self):
        """Default add from a subdirectory stages only that subdirectory."""
        os.mkdir(os.path.join(self.repo.path, "foo"))
        with open(os.path.join(self.repo.path, "blah"), "w") as f:
            f.write("\n")
        with open(os.path.join(self.repo.path, "foo", "blie"), "w") as f:
            f.write("\n")

        cwd = os.getcwd()
        try:
            os.chdir(os.path.join(self.repo.path, "foo"))
            porcelain.add(repo=self.repo.path)
            porcelain.commit(
                repo=self.repo.path,
                message=b"test",
                author=b"test <email>",
                committer=b"test <email>",
            )
        finally:
            os.chdir(cwd)

        # Only the file under foo/ was staged; top-level "blah" was not.
        index = self.repo.open_index()
        self.assertEqual(sorted(index), [b"foo/blie"])

    def test_add_file(self):
        """A single file named explicitly is staged."""
        fullpath = os.path.join(self.repo.path, "foo")
        with open(fullpath, "w") as f:
            f.write("BAR")
        porcelain.add(self.repo.path, paths=[fullpath])
        self.assertIn(b"foo", self.repo.open_index())

    def test_add_ignored(self):
        """Ignored paths are reported separately and are not staged."""
        with open(os.path.join(self.repo.path, ".gitignore"), "w") as f:
            f.write("foo\nsubdir/")
        with open(os.path.join(self.repo.path, "foo"), "w") as f:
            f.write("BAR")
        with open(os.path.join(self.repo.path, "bar"), "w") as f:
            f.write("BAR")
        os.mkdir(os.path.join(self.repo.path, "subdir"))
        with open(os.path.join(self.repo.path, "subdir", "baz"), "w") as f:
            f.write("BAZ")
        (added, ignored) = porcelain.add(
            self.repo.path,
            paths=[
                os.path.join(self.repo.path, "foo"),
                os.path.join(self.repo.path, "bar"),
                os.path.join(self.repo.path, "subdir"),
            ],
        )
        self.assertIn(b"bar", self.repo.open_index())
        self.assertEqual({"bar"}, set(added))
        # Ignored directories are reported with a trailing separator.
        self.assertEqual({"foo", os.path.join("subdir", "")}, ignored)

    def test_add_file_absolute_path(self):
        # Absolute paths are (not yet) supported
        with open(os.path.join(self.repo.path, "foo"), "w") as f:
            f.write("BAR")
        porcelain.add(self.repo, paths=[os.path.join(self.repo.path, "foo")])
        self.assertIn(b"foo", self.repo.open_index())

    def test_add_not_in_repo(self):
        """Paths outside the repository are rejected and nothing is staged."""
        with open(os.path.join(self.test_dir, "foo"), "w") as f:
            f.write("BAR")
        self.assertRaises(
            ValueError,
            porcelain.add,
            self.repo,
            paths=[os.path.join(self.test_dir, "foo")],
        )
        self.assertRaises(
            (ValueError, FileNotFoundError),
            porcelain.add,
            self.repo,
            paths=["../foo"],
        )
        self.assertEqual([], list(self.repo.open_index()))

    def test_add_file_clrf_conversion(self):
        """With core.autocrlf=input, CRLF is converted to LF when staging."""
        # Set the right configuration to the repo
        c = self.repo.get_config()
        c.set("core", "autocrlf", "input")
        c.write_to_path()

        # Add a file with CRLF line-ending
        fullpath = os.path.join(self.repo.path, "foo")
        with open(fullpath, "wb") as f:
            f.write(b"line1\r\nline2")
        porcelain.add(self.repo.path, paths=[fullpath])

        # The line-endings should have been converted to LF
        index = self.repo.open_index()
        self.assertIn(b"foo", index)

        entry = index[b"foo"]
        blob = self.repo[entry.sha]
        self.assertEqual(blob.data, b"line1\nline2")
+
+
class RemoveTests(PorcelainTestCase):
    """Tests for porcelain.remove."""

    def _create_foo(self):
        """Create a file named 'foo' in the working tree; return its path."""
        path = os.path.join(self.repo.path, "foo")
        with open(path, "w") as f:
            f.write("BAR")
        return path

    def test_remove_file(self):
        """A committed file is removed from disk by remove()."""
        path = self._create_foo()
        porcelain.add(self.repo.path, paths=[path])
        porcelain.commit(
            repo=self.repo,
            message=b"test",
            author=b"test <email>",
            committer=b"test <email>",
        )
        self.assertTrue(os.path.exists(os.path.join(self.repo.path, "foo")))
        cwd = os.getcwd()
        try:
            os.chdir(self.repo.path)
            porcelain.remove(self.repo.path, paths=["foo"])
        finally:
            os.chdir(cwd)
        self.assertFalse(os.path.exists(os.path.join(self.repo.path, "foo")))

    def test_remove_file_staged(self):
        """Removing a staged-but-uncommitted file raises."""
        path = self._create_foo()
        cwd = os.getcwd()
        try:
            os.chdir(self.repo.path)
            porcelain.add(self.repo.path, paths=[path])
            self.assertRaises(Exception, porcelain.rm, self.repo.path, paths=["foo"])
        finally:
            os.chdir(cwd)

    def test_remove_file_removed_on_disk(self):
        """remove() tolerates the file already being gone from disk."""
        path = self._create_foo()
        porcelain.add(self.repo.path, paths=[path])
        cwd = os.getcwd()
        try:
            os.chdir(self.repo.path)
            os.remove(path)
            porcelain.remove(self.repo.path, paths=["foo"])
        finally:
            os.chdir(cwd)
        self.assertFalse(os.path.exists(os.path.join(self.repo.path, "foo")))
+
+
class LogTests(PorcelainTestCase):
    """Tests for porcelain.log."""

    def test_simple(self):
        """log() writes one 50-dash separator per commit."""
        c1, c2, c3 = build_commit_graph(
            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
        )
        self.repo.refs[b"HEAD"] = c3.id
        buf = StringIO()
        porcelain.log(self.repo.path, outstream=buf)
        self.assertEqual(3, buf.getvalue().count("-" * 50))

    def test_max_entries(self):
        """max_entries limits how many commits are printed."""
        c1, c2, c3 = build_commit_graph(
            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
        )
        self.repo.refs[b"HEAD"] = c3.id
        buf = StringIO()
        porcelain.log(self.repo.path, outstream=buf, max_entries=1)
        self.assertEqual(1, buf.getvalue().count("-" * 50))
+
+
class ShowTests(PorcelainTestCase):
    """Tests for porcelain.show on commits, blobs and annotated tags."""

    def test_nolist(self):
        """A single object id (not wrapped in a list) is accepted."""
        c1, c2, c3 = build_commit_graph(
            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
        )
        self.repo.refs[b"HEAD"] = c3.id
        outstream = StringIO()
        porcelain.show(self.repo.path, objects=c3.id, outstream=outstream)
        self.assertTrue(outstream.getvalue().startswith("-" * 50))

    def test_simple(self):
        """Showing a commit id writes the commit header block."""
        c1, c2, c3 = build_commit_graph(
            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
        )
        self.repo.refs[b"HEAD"] = c3.id
        outstream = StringIO()
        porcelain.show(self.repo.path, objects=[c3.id], outstream=outstream)
        self.assertTrue(outstream.getvalue().startswith("-" * 50))

    def test_blob(self):
        """Showing a blob writes its raw contents."""
        b = Blob.from_string(b"The Foo\n")
        self.repo.object_store.add_object(b)
        outstream = StringIO()
        porcelain.show(self.repo.path, objects=[b.id], outstream=outstream)
        self.assertEqual(outstream.getvalue(), "The Foo\n")

    def test_commit_no_parent(self):
        """A root commit is shown with a diff against the empty tree."""
        a = Blob.from_string(b"The Foo\n")
        ta = Tree()
        ta.add(b"somename", 0o100644, a.id)
        ca = make_commit(tree=ta.id)
        self.repo.object_store.add_objects([(a, None), (ta, None), (ca, None)])
        outstream = StringIO()
        porcelain.show(self.repo.path, objects=[ca.id], outstream=outstream)
        self.assertMultiLineEqual(
            outstream.getvalue(),
            """\
--------------------------------------------------
commit: 344da06c1bb85901270b3e8875c988a027ec087d
Author: Test Author <test@nodomain.com>
Committer: Test Committer <test@nodomain.com>
Date:   Fri Jan 01 2010 00:00:00 +0000

Test message.

diff --git a/somename b/somename
new file mode 100644
index 0000000..ea5c7bf
--- /dev/null
+++ b/somename
@@ -0,0 +1 @@
+The Foo
""",
        )

    def test_tag(self):
        """An annotated tag shows tag metadata, then the tagged commit."""
        a = Blob.from_string(b"The Foo\n")
        ta = Tree()
        ta.add(b"somename", 0o100644, a.id)
        ca = make_commit(tree=ta.id)
        self.repo.object_store.add_objects([(a, None), (ta, None), (ca, None)])
        porcelain.tag_create(
            self.repo.path,
            b"tryme",
            b"foo <foo@bar.com>",
            b"bar",
            annotated=True,
            objectish=ca.id,
            tag_time=1552854211,
            tag_timezone=0,
        )
        outstream = StringIO()
        porcelain.show(self.repo, objects=[b"refs/tags/tryme"], outstream=outstream)
        self.maxDiff = None
        self.assertMultiLineEqual(
            outstream.getvalue(),
            """\
Tagger: foo <foo@bar.com>
Date:   Sun Mar 17 2019 20:23:31 +0000

bar

--------------------------------------------------
commit: 344da06c1bb85901270b3e8875c988a027ec087d
Author: Test Author <test@nodomain.com>
Committer: Test Committer <test@nodomain.com>
Date:   Fri Jan 01 2010 00:00:00 +0000

Test message.

diff --git a/somename b/somename
new file mode 100644
index 0000000..ea5c7bf
--- /dev/null
+++ b/somename
@@ -0,0 +1 @@
+The Foo
""",
        )

    def test_commit_with_change(self):
        """A commit with a parent is shown with the diff against the parent."""
        a = Blob.from_string(b"The Foo\n")
        ta = Tree()
        ta.add(b"somename", 0o100644, a.id)
        ca = make_commit(tree=ta.id)
        b = Blob.from_string(b"The Bar\n")
        tb = Tree()
        tb.add(b"somename", 0o100644, b.id)
        cb = make_commit(tree=tb.id, parents=[ca.id])
        self.repo.object_store.add_objects(
            [
                (a, None),
                (b, None),
                (ta, None),
                (tb, None),
                (ca, None),
                (cb, None),
            ]
        )
        outstream = StringIO()
        porcelain.show(self.repo.path, objects=[cb.id], outstream=outstream)
        self.assertMultiLineEqual(
            outstream.getvalue(),
            """\
--------------------------------------------------
commit: 2c6b6c9cb72c130956657e1fdae58e5b103744fa
Author: Test Author <test@nodomain.com>
Committer: Test Committer <test@nodomain.com>
Date:   Fri Jan 01 2010 00:00:00 +0000

Test message.

diff --git a/somename b/somename
index ea5c7bf..fd38bcb 100644
--- a/somename
+++ b/somename
@@ -1 +1 @@
-The Foo
+The Bar
""",
        )
+
+
class SymbolicRefTests(PorcelainTestCase):
    """Tests for porcelain.symbolic_ref."""

    def _head_contents(self):
        """Return the raw bytes stored in the HEAD file."""
        with self.repo.get_named_file("HEAD") as f:
            return f.read()

    def test_set_wrong_symbolic_ref(self):
        """Pointing HEAD at a branch that does not exist raises."""
        c1, c2, c3 = build_commit_graph(
            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
        )
        self.repo.refs[b"HEAD"] = c3.id

        self.assertRaises(
            porcelain.Error, porcelain.symbolic_ref, self.repo.path, b"foobar"
        )

    def test_set_force_wrong_symbolic_ref(self):
        """force=True allows pointing HEAD at a nonexistent branch."""
        c1, c2, c3 = build_commit_graph(
            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
        )
        self.repo.refs[b"HEAD"] = c3.id

        porcelain.symbolic_ref(self.repo.path, b"force_foobar", force=True)

        # test if we actually changed the file
        self.assertEqual(self._head_contents(), b"ref: refs/heads/force_foobar\n")

    def test_set_symbolic_ref(self):
        """HEAD can be pointed at an existing branch."""
        c1, c2, c3 = build_commit_graph(
            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
        )
        self.repo.refs[b"HEAD"] = c3.id

        porcelain.symbolic_ref(self.repo.path, b"master")

    def test_set_symbolic_ref_other_than_master(self):
        """HEAD can be pointed at any existing branch, not only master."""
        c1, c2, c3 = build_commit_graph(
            self.repo.object_store,
            [[1], [2, 1], [3, 1, 2]],
            attrs=dict(refs="develop"),
        )
        self.repo.refs[b"HEAD"] = c3.id
        self.repo.refs[b"refs/heads/develop"] = c3.id

        porcelain.symbolic_ref(self.repo.path, b"develop")

        # test if we actually changed the file
        self.assertEqual(self._head_contents(), b"ref: refs/heads/develop\n")
+
+
class DiffTreeTests(PorcelainTestCase):
    """Tests for porcelain.diff_tree."""

    def test_empty(self):
        """Two identical trees produce no diff output."""
        c1, c2, c3 = build_commit_graph(
            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
        )
        self.repo.refs[b"HEAD"] = c3.id
        sink = BytesIO()
        porcelain.diff_tree(self.repo.path, c2.tree, c3.tree, outstream=sink)
        self.assertEqual(sink.getvalue(), b"")
+
+
class CommitTreeTests(PorcelainTestCase):
    """Tests for porcelain.commit_tree."""

    def test_simple(self):
        """A commit can be created directly from an existing tree object."""
        c1, c2, c3 = build_commit_graph(
            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
        )
        blob = Blob()
        blob.data = b"foo the bar"
        tree = Tree()
        tree.add(b"somename", 0o100644, blob.id)
        self.repo.object_store.add_object(tree)
        self.repo.object_store.add_object(blob)
        sha = porcelain.commit_tree(
            self.repo.path,
            tree.id,
            message=b"Withcommit.",
            author=b"Joe <joe@example.com>",
            committer=b"Jane <jane@example.com>",
        )
        self.assertIsInstance(sha, bytes)
        self.assertEqual(len(sha), 40)
+
+
class RevListTests(PorcelainTestCase):
    """Tests for porcelain.rev_list."""

    def test_simple(self):
        """Commits are listed newest-first, one id per line."""
        c1, c2, c3 = build_commit_graph(
            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
        )
        sink = BytesIO()
        porcelain.rev_list(self.repo.path, [c3.id], outstream=sink)
        expected = b"".join([c3.id + b"\n", c2.id + b"\n", c1.id + b"\n"])
        self.assertEqual(expected, sink.getvalue())
+
+
@skipIf(
    platform.python_implementation() == "PyPy" or sys.platform == "win32",
    "gpgme not easily available or supported on Windows and PyPy",
)
class TagCreateSignTests(PorcelainGpgTestCase):
    """Tests for creating GPG-signed annotated tags."""

    def test_default_key(self):
        """sign=True uses the key configured as ``user.signingKey``."""
        c1, c2, c3 = build_commit_graph(
            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
        )
        self.repo.refs[b"HEAD"] = c3.id
        cfg = self.repo.get_config()
        cfg.set(("user",), "signingKey", PorcelainGpgTestCase.DEFAULT_KEY_ID)
        self.import_default_key()

        porcelain.tag_create(
            self.repo.path,
            b"tryme",
            b"foo <foo@bar.com>",
            b"bar",
            annotated=True,
            sign=True,
        )

        tags = self.repo.refs.as_dict(b"refs/tags")
        self.assertEqual(list(tags.keys()), [b"tryme"])
        tag = self.repo[b"refs/tags/tryme"]
        self.assertIsInstance(tag, Tag)
        self.assertEqual(b"foo <foo@bar.com>", tag.tagger)
        self.assertEqual(b"bar\n", tag.message)
        self.assertRecentTimestamp(tag.tag_time)
        tag = self.repo[b"refs/tags/tryme"]
        # GPG Signatures aren't deterministic, so we can't do a static assertion.
        tag.verify()
        tag.verify(keyids=[PorcelainGpgTestCase.DEFAULT_KEY_ID])

        # Verification restricted to a key that did NOT sign must fail.
        self.import_non_default_key()
        self.assertRaises(
            gpg.errors.MissingSignatures,
            tag.verify,
            keyids=[PorcelainGpgTestCase.NON_DEFAULT_KEY_ID],
        )

        # Corrupting the signed payload invalidates the signature.
        tag._chunked_text = [b"bad data", tag._signature]
        self.assertRaises(
            gpg.errors.BadSignatures,
            tag.verify,
        )

    def test_non_default_key(self):
        """sign=<keyid> signs with that explicit key."""
        c1, c2, c3 = build_commit_graph(
            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
        )
        self.repo.refs[b"HEAD"] = c3.id
        cfg = self.repo.get_config()
        cfg.set(("user",), "signingKey", PorcelainGpgTestCase.DEFAULT_KEY_ID)
        self.import_non_default_key()

        porcelain.tag_create(
            self.repo.path,
            b"tryme",
            b"foo <foo@bar.com>",
            b"bar",
            annotated=True,
            sign=PorcelainGpgTestCase.NON_DEFAULT_KEY_ID,
        )

        tags = self.repo.refs.as_dict(b"refs/tags")
        self.assertEqual(list(tags.keys()), [b"tryme"])
        tag = self.repo[b"refs/tags/tryme"]
        self.assertIsInstance(tag, Tag)
        self.assertEqual(b"foo <foo@bar.com>", tag.tagger)
        self.assertEqual(b"bar\n", tag.message)
        self.assertRecentTimestamp(tag.tag_time)
        tag = self.repo[b"refs/tags/tryme"]
        # GPG Signatures aren't deterministic, so we can't do a static assertion.
        tag.verify()
+
+
class TagCreateTests(PorcelainTestCase):
    """Tests for porcelain.tag_create."""

    def test_annotated(self):
        """An annotated tag records tagger, message and timestamp."""
        c1, c2, c3 = build_commit_graph(
            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
        )
        self.repo.refs[b"HEAD"] = c3.id

        porcelain.tag_create(
            self.repo.path,
            b"tryme",
            b"foo <foo@bar.com>",
            b"bar",
            annotated=True,
        )

        refs = self.repo.refs.as_dict(b"refs/tags")
        self.assertEqual([b"tryme"], list(refs.keys()))
        tag = self.repo[b"refs/tags/tryme"]
        self.assertIsInstance(tag, Tag)
        self.assertEqual(b"foo <foo@bar.com>", tag.tagger)
        self.assertEqual(b"bar\n", tag.message)
        self.assertRecentTimestamp(tag.tag_time)

    def test_unannotated(self):
        """A lightweight tag simply points at the current HEAD commit."""
        c1, c2, c3 = build_commit_graph(
            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
        )
        self.repo.refs[b"HEAD"] = c3.id

        porcelain.tag_create(self.repo.path, b"tryme", annotated=False)

        refs = self.repo.refs.as_dict(b"refs/tags")
        self.assertEqual([b"tryme"], list(refs.keys()))
        self.repo[b"refs/tags/tryme"]
        self.assertEqual([self.repo.head()], list(refs.values()))

    def test_unannotated_unicode(self):
        """The tag name may also be given as a str."""
        c1, c2, c3 = build_commit_graph(
            self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
        )
        self.repo.refs[b"HEAD"] = c3.id

        porcelain.tag_create(self.repo.path, "tryme", annotated=False)

        refs = self.repo.refs.as_dict(b"refs/tags")
        self.assertEqual([b"tryme"], list(refs.keys()))
        self.repo[b"refs/tags/tryme"]
        self.assertEqual([self.repo.head()], list(refs.values()))
+
+
class TagListTests(PorcelainTestCase):
    """Tests for porcelain.tag_list."""

    def test_empty(self):
        """A repository without tags yields an empty list."""
        self.assertEqual([], porcelain.tag_list(self.repo.path))

    def test_simple(self):
        """Tag names come back sorted, including nested ones."""
        self.repo.refs[b"refs/tags/foo"] = b"aa" * 20
        self.repo.refs[b"refs/tags/bar/bla"] = b"bb" * 20
        self.assertEqual([b"bar/bla", b"foo"], porcelain.tag_list(self.repo.path))
+
+
class TagDeleteTests(PorcelainTestCase):
    """Tests for porcelain.tag_delete."""

    def test_simple(self):
        """Deleting a tag removes it from the tag list."""
        tag_name = b"foo"
        [head] = build_commit_graph(self.repo.object_store, [[1]])
        self.repo[b"HEAD"] = head.id
        porcelain.tag_create(self.repo, tag_name)
        self.assertIn(tag_name, porcelain.tag_list(self.repo))
        porcelain.tag_delete(self.repo, tag_name)
        self.assertNotIn(tag_name, porcelain.tag_list(self.repo))
+
+
class ResetTests(PorcelainTestCase):
    """Tests for porcelain.reset."""

    def _assert_index_matches_tree(self, tree_id):
        """Assert the index has no changes relative to *tree_id*."""
        index = self.repo.open_index()
        changes = list(
            tree_changes(
                self.repo,
                index.commit(self.repo.object_store),
                tree_id,
            )
        )
        self.assertEqual([], changes)

    def test_hard_head(self):
        """reset --hard HEAD discards working-tree modifications."""
        fullpath = os.path.join(self.repo.path, "foo")
        with open(fullpath, "w") as f:
            f.write("BAR")
        porcelain.add(self.repo.path, paths=[fullpath])
        porcelain.commit(
            self.repo.path,
            message=b"Some message",
            committer=b"Jane <jane@example.com>",
            author=b"John <john@example.com>",
        )

        with open(os.path.join(self.repo.path, "foo"), "wb") as f:
            f.write(b"OOH")

        porcelain.reset(self.repo, "hard", b"HEAD")

        self._assert_index_matches_tree(self.repo[b"HEAD"].tree)

    def test_hard_commit(self):
        """reset --hard <sha> rewinds index and tree to an older commit."""
        fullpath = os.path.join(self.repo.path, "foo")
        with open(fullpath, "w") as f:
            f.write("BAR")
        porcelain.add(self.repo.path, paths=[fullpath])
        sha = porcelain.commit(
            self.repo.path,
            message=b"Some message",
            committer=b"Jane <jane@example.com>",
            author=b"John <john@example.com>",
        )

        with open(fullpath, "wb") as f:
            f.write(b"BAZ")
        porcelain.add(self.repo.path, paths=[fullpath])
        porcelain.commit(
            self.repo.path,
            message=b"Some other message",
            committer=b"Jane <jane@example.com>",
            author=b"John <john@example.com>",
        )

        porcelain.reset(self.repo, "hard", sha)

        self._assert_index_matches_tree(self.repo[sha].tree)
+
+
class ResetFileTests(PorcelainTestCase):
    """Tests for porcelain.reset_file."""

    def _commit_file(self, full_path, message=b"unitest"):
        """Stage *full_path* and commit it, returning the commit sha."""
        porcelain.add(self.repo, paths=[full_path])
        return porcelain.commit(
            self.repo,
            message=message,
            committer=b"Jane <jane@example.com>",
            author=b"John <john@example.com>",
        )

    def test_reset_modify_file_to_commit(self):
        """A modified file is restored to its committed contents."""
        file = "foo"
        full_path = os.path.join(self.repo.path, file)

        with open(full_path, "w") as f:
            f.write("hello")
        sha = self._commit_file(full_path)
        with open(full_path, "a") as f:
            f.write("something new")
        porcelain.reset_file(self.repo, file, target=sha)

        with open(full_path) as f:
            self.assertEqual("hello", f.read())

    def test_reset_remove_file_to_commit(self):
        """A deleted file is recreated from the target commit."""
        file = "foo"
        full_path = os.path.join(self.repo.path, file)

        with open(full_path, "w") as f:
            f.write("hello")
        sha = self._commit_file(full_path)
        os.remove(full_path)
        porcelain.reset_file(self.repo, file, target=sha)

        with open(full_path) as f:
            self.assertEqual("hello", f.read())

    def test_resetfile_with_dir(self):
        """reset_file works for a path inside a subdirectory."""
        os.mkdir(os.path.join(self.repo.path, "new_dir"))
        full_path = os.path.join(self.repo.path, "new_dir", "foo")

        with open(full_path, "w") as f:
            f.write("hello")
        sha = self._commit_file(full_path)
        with open(full_path, "a") as f:
            f.write("something new")
        # Note: the second commit deliberately does NOT re-stage the file,
        # matching the original test's behavior.
        porcelain.commit(
            self.repo,
            message=b"unitest 2",
            committer=b"Jane <jane@example.com>",
            author=b"John <john@example.com>",
        )
        porcelain.reset_file(self.repo, os.path.join("new_dir", "foo"), target=sha)

        with open(full_path) as f:
            self.assertEqual("hello", f.read())
+
+
def _commit_file_with_content(repo, filename, content):
    """Write *content* to *filename* inside *repo*, stage it and commit it.

    Returns a tuple of (commit sha, absolute path of the written file).
    """
    path = os.path.join(repo.path, filename)
    with open(path, "w") as f:
        f.write(content)
    porcelain.add(repo, paths=[path])
    sha = porcelain.commit(
        repo,
        message=b"add " + filename.encode(),
        committer=b"Jane <jane@example.com>",
        author=b"John <john@example.com>",
    )
    return sha, path
+
+
class CheckoutTests(PorcelainTestCase):
    """Tests for porcelain.checkout_branch.

    setUp commits a file "foo" on master and creates a branch "uni"
    pointing at the same commit, so both branches initially share one tree.
    """

    def setUp(self):
        super().setUp()
        # Initial commit shared by both branches; keep its sha and file path.
        self._sha, self._foo_path = _commit_file_with_content(
            self.repo, "foo", "hello\n"
        )
        porcelain.branch_create(self.repo, "uni")

    def test_checkout_to_existing_branch(self):
        """Checking out an existing branch switches the active branch."""
        self.assertEqual(b"master", porcelain.active_branch(self.repo))
        porcelain.checkout_branch(self.repo, b"uni")
        self.assertEqual(b"uni", porcelain.active_branch(self.repo))

    def test_checkout_to_non_existing_branch(self):
        """Checking out an unknown branch raises KeyError and keeps HEAD."""
        self.assertEqual(b"master", porcelain.active_branch(self.repo))

        with self.assertRaises(KeyError):
            porcelain.checkout_branch(self.repo, b"bob")

        self.assertEqual(b"master", porcelain.active_branch(self.repo))

    def test_checkout_to_branch_with_modified_files(self):
        """Staged edits to a file present on both branches survive checkout."""
        with open(self._foo_path, "a") as f:
            f.write("new message\n")
        porcelain.add(self.repo, paths=[self._foo_path])

        status = list(porcelain.status(self.repo))
        self.assertEqual(
            [{"add": [], "delete": [], "modify": [b"foo"]}, [], []], status
        )

        # Both branches have file 'foo' checkout should be fine.
        porcelain.checkout_branch(self.repo, b"uni")
        self.assertEqual(b"uni", porcelain.active_branch(self.repo))

        # The staged modification is carried over to the new branch.
        status = list(porcelain.status(self.repo))
        self.assertEqual(
            [{"add": [], "delete": [], "modify": [b"foo"]}, [], []], status
        )

    def test_checkout_with_deleted_files(self):
        """A staged deletion of a shared file survives checkout."""
        porcelain.remove(self.repo.path, [os.path.join(self.repo.path, "foo")])
        status = list(porcelain.status(self.repo))
        self.assertEqual(
            [{"add": [], "delete": [b"foo"], "modify": []}, [], []], status
        )

        # Both branches have file 'foo' checkout should be fine.
        porcelain.checkout_branch(self.repo, b"uni")
        self.assertEqual(b"uni", porcelain.active_branch(self.repo))

        status = list(porcelain.status(self.repo))
        self.assertEqual(
            [{"add": [], "delete": [b"foo"], "modify": []}, [], []], status
        )

    def test_checkout_to_branch_with_added_files(self):
        """A staged new file survives checkout to another branch."""
        file_path = os.path.join(self.repo.path, "bar")

        with open(file_path, "w") as f:
            f.write("bar content\n")
        porcelain.add(self.repo, paths=[file_path])
        status = list(porcelain.status(self.repo))
        self.assertEqual(
            [{"add": [b"bar"], "delete": [], "modify": []}, [], []], status
        )

        # Both branches have file 'foo' checkout should be fine.
        porcelain.checkout_branch(self.repo, b"uni")
        self.assertEqual(b"uni", porcelain.active_branch(self.repo))

        status = list(porcelain.status(self.repo))
        self.assertEqual(
            [{"add": [b"bar"], "delete": [], "modify": []}, [], []], status
        )

    def test_checkout_to_branch_with_modified_file_not_present(self):
        """Checkout is aborted when it would clobber a modified file."""
        # Commit a new file that the other branch doesn't have.
        _, nee_path = _commit_file_with_content(self.repo, "nee", "Good content\n")

        # Modify the file the other branch doesn't have.
        with open(nee_path, "a") as f:
            f.write("bar content\n")
        porcelain.add(self.repo, paths=[nee_path])
        status = list(porcelain.status(self.repo))
        self.assertEqual(
            [{"add": [], "delete": [], "modify": [b"nee"]}, [], []], status
        )

        # 'uni' branch doesn't have 'nee' and it has been modified, should result in the checkout being aborted.
        with self.assertRaises(CheckoutError):
            porcelain.checkout_branch(self.repo, b"uni")

        self.assertEqual(b"master", porcelain.active_branch(self.repo))

        # The pending modification is untouched by the failed checkout.
        status = list(porcelain.status(self.repo))
        self.assertEqual(
            [{"add": [], "delete": [], "modify": [b"nee"]}, [], []], status
        )

    def test_checkout_to_branch_with_modified_file_not_present_forced(self):
        """force=True discards conflicting modifications and switches branch."""
        # Commit a new file that the other branch doesn't have.
        _, nee_path = _commit_file_with_content(self.repo, "nee", "Good content\n")

        # Modify the file the other branch doesn't have.
        with open(nee_path, "a") as f:
            f.write("bar content\n")
        porcelain.add(self.repo, paths=[nee_path])
        status = list(porcelain.status(self.repo))
        self.assertEqual(
            [{"add": [], "delete": [], "modify": [b"nee"]}, [], []], status
        )

        # 'uni' branch doesn't have 'nee' and it has been modified, but we force to reset the entire index.
        porcelain.checkout_branch(self.repo, b"uni", force=True)

        self.assertEqual(b"uni", porcelain.active_branch(self.repo))

        # The forced checkout leaves a clean status.
        status = list(porcelain.status(self.repo))
        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)

    def test_checkout_to_branch_with_unstaged_files(self):
        """Unstaged edits to a shared file survive checkout."""
        # Edit `foo`.
        with open(self._foo_path, "a") as f:
            f.write("new message")

        status = list(porcelain.status(self.repo))
        self.assertEqual(
            [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status
        )

        porcelain.checkout_branch(self.repo, b"uni")

        status = list(porcelain.status(self.repo))
        self.assertEqual(
            [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status
        )

    def test_checkout_to_branch_with_untracked_files(self):
        """Untracked files are left alone by checkout."""
        with open(os.path.join(self.repo.path, "neu"), "a") as f:
            f.write("new message\n")

        status = list(porcelain.status(self.repo))
        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["neu"]], status)

        porcelain.checkout_branch(self.repo, b"uni")

        status = list(porcelain.status(self.repo))
        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["neu"]], status)

    def test_checkout_to_branch_with_new_files(self):
        """Files committed only on one branch appear/disappear on checkout."""
        porcelain.checkout_branch(self.repo, b"uni")
        sub_directory = os.path.join(self.repo.path, "sub1")
        os.mkdir(sub_directory)
        # Commit several files on 'uni' only, at top level and in sub1/.
        for index in range(5):
            _commit_file_with_content(
                self.repo, "new_file_" + str(index + 1), "Some content\n"
            )
            _commit_file_with_content(
                self.repo,
                os.path.join("sub1", "new_file_" + str(index + 10)),
                "Good content\n",
            )

        status = list(porcelain.status(self.repo))
        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)

        # Switching back to master must drop the 'uni'-only files cleanly.
        porcelain.checkout_branch(self.repo, b"master")
        self.assertEqual(b"master", porcelain.active_branch(self.repo))
        status = list(porcelain.status(self.repo))
        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)

        # And switching forward again must restore them cleanly.
        porcelain.checkout_branch(self.repo, b"uni")
        self.assertEqual(b"uni", porcelain.active_branch(self.repo))
        status = list(porcelain.status(self.repo))
        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)

    def test_checkout_to_branch_with_file_in_sub_directory(self):
        """Checkout removes now-empty nested directories and recreates them."""
        sub_directory = os.path.join(self.repo.path, "sub1", "sub2")
        os.makedirs(sub_directory)

        sub_directory_file = os.path.join(sub_directory, "neu")
        with open(sub_directory_file, "w") as f:
            f.write("new message\n")

        porcelain.add(self.repo, paths=[sub_directory_file])
        porcelain.commit(
            self.repo,
            message=b"add " + sub_directory_file.encode(),
            committer=b"Jane <jane@example.com>",
            author=b"John <john@example.com>",
        )
        status = list(porcelain.status(self.repo))
        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)

        self.assertTrue(os.path.isdir(sub_directory))
        self.assertTrue(os.path.isdir(os.path.dirname(sub_directory)))

        porcelain.checkout_branch(self.repo, b"uni")

        status = list(porcelain.status(self.repo))
        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)

        # 'uni' has no files under sub1/, so the directories must be gone.
        self.assertFalse(os.path.isdir(sub_directory))
        self.assertFalse(os.path.isdir(os.path.dirname(sub_directory)))

        porcelain.checkout_branch(self.repo, b"master")

        # Returning to master recreates the nested directories.
        self.assertTrue(os.path.isdir(sub_directory))
        self.assertTrue(os.path.isdir(os.path.dirname(sub_directory)))

    def test_checkout_to_branch_with_multiple_files_in_sub_directory(self):
        """Checkout removes a nested directory holding several tracked files."""
        sub_directory = os.path.join(self.repo.path, "sub1", "sub2")
        os.makedirs(sub_directory)

        sub_directory_file_1 = os.path.join(sub_directory, "neu")
        with open(sub_directory_file_1, "w") as f:
            f.write("new message\n")

        sub_directory_file_2 = os.path.join(sub_directory, "gus")
        with open(sub_directory_file_2, "w") as f:
            f.write("alternative message\n")

        porcelain.add(self.repo, paths=[sub_directory_file_1, sub_directory_file_2])
        porcelain.commit(
            self.repo,
            message=b"add files neu and gus.",
            committer=b"Jane <jane@example.com>",
            author=b"John <john@example.com>",
        )
        status = list(porcelain.status(self.repo))
        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)

        self.assertTrue(os.path.isdir(sub_directory))
        self.assertTrue(os.path.isdir(os.path.dirname(sub_directory)))

        porcelain.checkout_branch(self.repo, b"uni")

        status = list(porcelain.status(self.repo))
        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)

        # Both files lived only on master, so the directories must be gone.
        self.assertFalse(os.path.isdir(sub_directory))
        self.assertFalse(os.path.isdir(os.path.dirname(sub_directory)))

    def _commit_something_wrong(self):
        # Helper: append to foo and commit, returning the new commit sha.
        with open(self._foo_path, "a") as f:
            f.write("something wrong")

        porcelain.add(self.repo, paths=[self._foo_path])
        return porcelain.commit(
            self.repo,
            message=b"I may added something wrong",
            committer=b"Jane <jane@example.com>",
            author=b"John <john@example.com>",
        )

    def test_checkout_to_commit_sha(self):
        """Checking out a commit sha moves HEAD to that commit."""
        self._commit_something_wrong()

        porcelain.checkout_branch(self.repo, self._sha)
        self.assertEqual(self._sha, self.repo.head())

    def test_checkout_to_head(self):
        """Checking out b"HEAD" keeps HEAD at the current commit."""
        new_sha = self._commit_something_wrong()

        porcelain.checkout_branch(self.repo, b"HEAD")
        self.assertEqual(new_sha, self.repo.head())

    def _checkout_remote_branch(self):
        # Helper: clone self.repo, push a 'foo' branch, then check out
        # the remote-tracking branch 'origin/foo' in the clone.  Returns
        # the clone (caller must close it).
        errstream = BytesIO()
        outstream = BytesIO()

        porcelain.commit(
            repo=self.repo.path,
            message=b"init",
            author=b"author <email>",
            committer=b"committer <email>",
        )

        # Setup target repo cloned from temp test repo
        clone_path = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, clone_path)
        target_repo = porcelain.clone(
            self.repo.path, target=clone_path, errstream=errstream
        )
        try:
            self.assertEqual(target_repo[b"HEAD"], self.repo[b"HEAD"])
        finally:
            # NOTE(review): target_repo is closed here but is still used
            # below; this appears to rely on on-disk ref access continuing
            # to work after close — verify.
            target_repo.close()

        # create a second file to be pushed back to origin
        handle, fullpath = tempfile.mkstemp(dir=clone_path)
        os.close(handle)
        porcelain.add(repo=clone_path, paths=[fullpath])
        porcelain.commit(
            repo=clone_path,
            message=b"push",
            author=b"author <email>",
            committer=b"committer <email>",
        )

        # Setup a non-checked out branch in the remote
        refs_path = b"refs/heads/foo"
        new_id = self.repo[b"HEAD"].id
        self.assertNotEqual(new_id, ZERO_SHA)
        self.repo.refs[refs_path] = new_id

        # Push to the remote
        porcelain.push(
            clone_path,
            "origin",
            b"HEAD:" + refs_path,
            outstream=outstream,
            errstream=errstream,
        )

        self.assertEqual(
            target_repo.refs[b"refs/remotes/origin/foo"],
            target_repo.refs[b"HEAD"],
        )

        # Checking out origin/foo should create a local 'foo' branch.
        porcelain.checkout_branch(target_repo, b"origin/foo")
        original_id = target_repo[b"HEAD"].id
        uni_id = target_repo[b"refs/remotes/origin/uni"].id

        expected_refs = {
            b"HEAD": original_id,
            b"refs/heads/master": original_id,
            b"refs/heads/foo": original_id,
            b"refs/remotes/origin/foo": original_id,
            b"refs/remotes/origin/uni": uni_id,
            b"refs/remotes/origin/HEAD": new_id,
            b"refs/remotes/origin/master": new_id,
        }
        self.assertEqual(expected_refs, target_repo.get_refs())

        return target_repo

    def test_checkout_remote_branch(self):
        """Checking out a remote-tracking branch works."""
        repo = self._checkout_remote_branch()
        repo.close()

    def test_checkout_remote_branch_then_master_then_remote_branch_again(self):
        """Round-tripping between a remote-tracked branch and master works."""
        target_repo = self._checkout_remote_branch()
        self.assertEqual(b"foo", porcelain.active_branch(target_repo))
        _commit_file_with_content(target_repo, "bar", "something\n")
        self.assertTrue(os.path.isfile(os.path.join(target_repo.path, "bar")))

        porcelain.checkout_branch(target_repo, b"master")

        # 'bar' was committed on foo only, so it must vanish on master.
        self.assertEqual(b"master", porcelain.active_branch(target_repo))
        self.assertFalse(os.path.isfile(os.path.join(target_repo.path, "bar")))

        porcelain.checkout_branch(target_repo, b"origin/foo")

        # Checking out origin/foo again lands on the local foo branch.
        self.assertEqual(b"foo", porcelain.active_branch(target_repo))
        self.assertTrue(os.path.isfile(os.path.join(target_repo.path, "bar")))

        target_repo.close()
+
+
class SubmoduleTests(PorcelainTestCase):
    """Tests for porcelain submodule commands."""

    def test_empty(self):
        """A repository without submodules yields an empty submodule list."""
        porcelain.commit(
            repo=self.repo.path,
            message=b"init",
            author=b"author <email>",
            committer=b"committer <email>",
        )

        self.assertEqual([], list(porcelain.submodule_list(self.repo)))

    def test_add(self):
        """submodule_add records url and path in .gitmodules."""
        porcelain.submodule_add(self.repo, "../bar.git", "bar")
        with open("%s/.gitmodules" % self.repo.path) as f:
            written = f.read()
        expected = (
            '[submodule "bar"]\n'
            "\turl = ../bar.git\n"
            "\tpath = bar\n"
        )
        self.assertEqual(expected, written)

    def test_init(self):
        """submodule_init runs cleanly after submodule_add."""
        porcelain.submodule_add(self.repo, "../bar.git", "bar")
        porcelain.submodule_init(self.repo)
+
+
class PushTests(PorcelainTestCase):
    """Tests for porcelain.push."""

    def test_simple(self):
        """Basic test of porcelain push where self.repo is the remote. First
        clone the remote, commit a file to the clone, then push the changes
        back to the remote.
        """
        outstream = BytesIO()
        errstream = BytesIO()

        porcelain.commit(
            repo=self.repo.path,
            message=b"init",
            author=b"author <email>",
            committer=b"committer <email>",
        )

        # Setup target repo cloned from temp test repo
        clone_path = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, clone_path)
        target_repo = porcelain.clone(
            self.repo.path, target=clone_path, errstream=errstream
        )
        try:
            self.assertEqual(target_repo[b"HEAD"], self.repo[b"HEAD"])
        finally:
            # NOTE(review): target_repo is closed here but its refs are read
            # again below; presumably disk-backed ref access still works
            # after close — verify.
            target_repo.close()

        # create a second file to be pushed back to origin
        handle, fullpath = tempfile.mkstemp(dir=clone_path)
        os.close(handle)
        porcelain.add(repo=clone_path, paths=[fullpath])
        porcelain.commit(
            repo=clone_path,
            message=b"push",
            author=b"author <email>",
            committer=b"committer <email>",
        )

        # Setup a non-checked out branch in the remote
        refs_path = b"refs/heads/foo"
        new_id = self.repo[b"HEAD"].id
        self.assertNotEqual(new_id, ZERO_SHA)
        self.repo.refs[refs_path] = new_id

        # Push to the remote
        porcelain.push(
            clone_path,
            "origin",
            b"HEAD:" + refs_path,
            outstream=outstream,
            errstream=errstream,
        )

        self.assertEqual(
            target_repo.refs[b"refs/remotes/origin/foo"],
            target_repo.refs[b"HEAD"],
        )

        # Check that the target and source
        with Repo(clone_path) as r_clone:
            self.assertEqual(
                {
                    b"HEAD": new_id,
                    b"refs/heads/foo": r_clone[b"HEAD"].id,
                    b"refs/heads/master": new_id,
                },
                self.repo.get_refs(),
            )
            self.assertEqual(r_clone[b"HEAD"].id, self.repo[refs_path].id)

        # Get the change in the target repo corresponding to the add
        # this will be in the foo branch.
        change = next(
            iter(
                tree_changes(
                    self.repo,
                    self.repo[b"HEAD"].tree,
                    self.repo[b"refs/heads/foo"].tree,
                )
            )
        )
        self.assertEqual(
            os.path.basename(fullpath), change.new.path.decode("ascii")
        )

    def test_local_missing(self):
        """Pushing a ref that cannot be resolved locally raises an error."""
        outstream = BytesIO()
        errstream = BytesIO()

        # Setup target repo cloned from temp test repo
        clone_path = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, clone_path)
        target_repo = porcelain.init(clone_path)
        target_repo.close()

        # self.repo has no commits, so HEAD cannot be resolved for the push.
        self.assertRaises(
            porcelain.Error,
            porcelain.push,
            self.repo,
            clone_path,
            b"HEAD:refs/heads/master",
            outstream=outstream,
            errstream=errstream,
        )

    def test_new(self):
        """Pushing a new branch."""
        outstream = BytesIO()
        errstream = BytesIO()

        # Setup target repo cloned from temp test repo
        clone_path = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, clone_path)
        target_repo = porcelain.init(clone_path)
        target_repo.close()

        # create a second file to be pushed back to origin
        handle, fullpath = tempfile.mkstemp(dir=clone_path)
        os.close(handle)
        porcelain.add(repo=clone_path, paths=[fullpath])
        # NOTE(review): the file above is staged in the clone, but the commit
        # below is made in self.repo — looks like a leftover; verify intent.
        new_id = porcelain.commit(
            repo=self.repo,
            message=b"push",
            author=b"author <email>",
            committer=b"committer <email>",
        )

        # Push to the remote
        porcelain.push(
            self.repo,
            clone_path,
            b"HEAD:refs/heads/master",
            outstream=outstream,
            errstream=errstream,
        )

        # The new commit must now be the tip of master in the target.
        with Repo(clone_path) as r_clone:
            self.assertEqual(
                {
                    b"HEAD": new_id,
                    b"refs/heads/master": new_id,
                },
                r_clone.get_refs(),
            )

    def test_delete(self):
        """Basic test of porcelain push, removing a branch."""
        outstream = BytesIO()
        errstream = BytesIO()

        porcelain.commit(
            repo=self.repo.path,
            message=b"init",
            author=b"author <email>",
            committer=b"committer <email>",
        )

        # Setup target repo cloned from temp test repo
        clone_path = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, clone_path)
        target_repo = porcelain.clone(
            self.repo.path, target=clone_path, errstream=errstream
        )
        target_repo.close()

        # Setup a non-checked out branch in the remote
        refs_path = b"refs/heads/foo"
        new_id = self.repo[b"HEAD"].id
        self.assertNotEqual(new_id, ZERO_SHA)
        self.repo.refs[refs_path] = new_id

        # Push to the remote — an empty source side (b":" + ref) deletes
        # the ref on the receiving end.
        porcelain.push(
            clone_path,
            self.repo.path,
            b":" + refs_path,
            outstream=outstream,
            errstream=errstream,
        )

        # refs/heads/foo must be gone from the remote.
        self.assertEqual(
            {
                b"HEAD": new_id,
                b"refs/heads/master": new_id,
            },
            self.repo.get_refs(),
        )

    def test_diverged(self):
        """A non-fast-forward push fails unless force=True is given."""
        outstream = BytesIO()
        errstream = BytesIO()

        porcelain.commit(
            repo=self.repo.path,
            message=b"init",
            author=b"author <email>",
            committer=b"committer <email>",
        )

        # Setup target repo cloned from temp test repo
        clone_path = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, clone_path)
        target_repo = porcelain.clone(
            self.repo.path, target=clone_path, errstream=errstream
        )
        target_repo.close()

        # Make the two repositories diverge with one commit on each side.
        remote_id = porcelain.commit(
            repo=self.repo.path,
            message=b"remote change",
            author=b"author <email>",
            committer=b"committer <email>",
        )

        local_id = porcelain.commit(
            repo=clone_path,
            message=b"local change",
            author=b"author <email>",
            committer=b"committer <email>",
        )

        outstream = BytesIO()
        errstream = BytesIO()

        # Push to the remote
        self.assertRaises(
            porcelain.DivergedBranches,
            porcelain.push,
            clone_path,
            self.repo.path,
            b"refs/heads/master",
            outstream=outstream,
            errstream=errstream,
        )

        # The failed push must not have touched the remote's refs.
        self.assertEqual(
            {
                b"HEAD": remote_id,
                b"refs/heads/master": remote_id,
            },
            self.repo.get_refs(),
        )

        self.assertEqual(b"", outstream.getvalue())
        self.assertEqual(b"", errstream.getvalue())

        outstream = BytesIO()
        errstream = BytesIO()

        # Push to the remote with --force
        porcelain.push(
            clone_path,
            self.repo.path,
            b"refs/heads/master",
            outstream=outstream,
            errstream=errstream,
            force=True,
        )

        # The forced push overwrites the remote's master with the local tip.
        self.assertEqual(
            {
                b"HEAD": local_id,
                b"refs/heads/master": local_id,
            },
            self.repo.get_refs(),
        )

        self.assertEqual(b"", outstream.getvalue())
        self.assertTrue(re.match(b"Push to .* successful.\n", errstream.getvalue()))
+
+
class PullTests(PorcelainTestCase):
    """Tests for porcelain.pull.

    setUp leaves self.repo one commit ahead of the clone at
    self.target_path, so a pull has something to fetch.
    """

    def setUp(self):
        super().setUp()
        # create a file for initial commit
        handle, fullpath = tempfile.mkstemp(dir=self.repo.path)
        os.close(handle)
        porcelain.add(repo=self.repo.path, paths=fullpath)
        porcelain.commit(
            repo=self.repo.path,
            message=b"test",
            author=b"test <email>",
            committer=b"test <email>",
        )

        # Setup target repo
        self.target_path = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.target_path)
        target_repo = porcelain.clone(
            self.repo.path, target=self.target_path, errstream=BytesIO()
        )
        target_repo.close()

        # create a second file to be pushed
        handle, fullpath = tempfile.mkstemp(dir=self.repo.path)
        os.close(handle)
        porcelain.add(repo=self.repo.path, paths=fullpath)
        porcelain.commit(
            repo=self.repo.path,
            message=b"test2",
            author=b"test2 <email>",
            committer=b"test2 <email>",
        )

        self.assertIn(b"refs/heads/master", self.repo.refs)
        self.assertIn(b"refs/heads/master", target_repo.refs)

    def test_simple(self):
        """Pulling with an explicit refspec fast-forwards the clone."""
        outstream = BytesIO()
        errstream = BytesIO()

        # Pull changes into the cloned repo
        porcelain.pull(
            self.target_path,
            self.repo.path,
            b"refs/heads/master",
            outstream=outstream,
            errstream=errstream,
        )

        # Check the target repo for pushed changes
        with Repo(self.target_path) as r:
            self.assertEqual(r[b"HEAD"].id, self.repo[b"HEAD"].id)

    def test_diverged(self):
        """Pulling into a diverged clone raises DivergedBranches."""
        outstream = BytesIO()
        errstream = BytesIO()

        # Make the clone and the origin diverge with one commit each.
        c3a = porcelain.commit(
            repo=self.target_path,
            message=b"test3a",
            author=b"test2 <email>",
            committer=b"test2 <email>",
        )

        porcelain.commit(
            repo=self.repo.path,
            message=b"test3b",
            author=b"test2 <email>",
            committer=b"test2 <email>",
        )

        # Pull changes into the cloned repo
        self.assertRaises(
            porcelain.DivergedBranches,
            porcelain.pull,
            self.target_path,
            self.repo.path,
            b"refs/heads/master",
            outstream=outstream,
            errstream=errstream,
        )

        # The failed pull must not have moved the clone's master.
        with Repo(self.target_path) as r:
            self.assertEqual(r[b"refs/heads/master"].id, c3a)

        # Non-fast-forward pulls are not implemented.
        self.assertRaises(
            NotImplementedError,
            porcelain.pull,
            self.target_path,
            self.repo.path,
            b"refs/heads/master",
            outstream=outstream,
            errstream=errstream,
            fast_forward=False,
        )

        # Check the target repo for pushed changes
        with Repo(self.target_path) as r:
            self.assertEqual(r[b"refs/heads/master"].id, c3a)

    def test_no_refspec(self):
        """Pulling without a refspec still updates the clone's HEAD."""
        outstream = BytesIO()
        errstream = BytesIO()

        # Pull changes into the cloned repo
        porcelain.pull(
            self.target_path,
            self.repo.path,
            outstream=outstream,
            errstream=errstream,
        )

        # Check the target repo for pushed changes
        with Repo(self.target_path) as r:
            self.assertEqual(r[b"HEAD"].id, self.repo[b"HEAD"].id)

    def test_no_remote_location(self):
        """Pulling without a remote location uses the configured origin."""
        outstream = BytesIO()
        errstream = BytesIO()

        # Pull changes into the cloned repo
        porcelain.pull(
            self.target_path,
            refspecs=b"refs/heads/master",
            outstream=outstream,
            errstream=errstream,
        )

        # Check the target repo for pushed changes
        with Repo(self.target_path) as r:
            self.assertEqual(r[b"HEAD"].id, self.repo[b"HEAD"].id)
+
+
+class StatusTests(PorcelainTestCase):
+ def test_empty(self):
+ results = porcelain.status(self.repo)
+ self.assertEqual({"add": [], "delete": [], "modify": []}, results.staged)
+ self.assertEqual([], results.unstaged)
+
    def test_status_base(self):
        """Integration test for `status` functionality."""
        # Commit a dummy file then modify it
        fullpath = os.path.join(self.repo.path, "foo")
        with open(fullpath, "w") as f:
            f.write("origstuff")

        porcelain.add(repo=self.repo.path, paths=[fullpath])
        porcelain.commit(
            repo=self.repo.path,
            message=b"test status",
            author=b"author <email>",
            committer=b"committer <email>",
        )

        # modify access and modify time of path
        # (backdated to epoch so the cached index stat no longer matches
        # and status re-reads the content)
        os.utime(fullpath, (0, 0))

        with open(fullpath, "wb") as f:
            f.write(b"stuff")

        # Make a dummy file and stage it
        filename_add = "bar"
        fullpath = os.path.join(self.repo.path, filename_add)
        with open(fullpath, "w") as f:
            f.write("stuff")
        porcelain.add(repo=self.repo.path, paths=fullpath)

        results = porcelain.status(self.repo)

        # "bar" is staged as added; "foo" is modified but unstaged.
        self.assertEqual(results.staged["add"][0], filename_add.encode("ascii"))
        self.assertEqual(results.unstaged, [b"foo"])
+
    def test_status_all(self):
        """Exercise every status bucket: staged add/delete/modify, unstaged,
        and untracked, plus untracked_files="no" suppression."""
        del_path = os.path.join(self.repo.path, "foo")  # staged delete
        mod_path = os.path.join(self.repo.path, "bar")  # staged modify
        add_path = os.path.join(self.repo.path, "baz")  # staged add
        us_path = os.path.join(self.repo.path, "blye")  # unstaged modify
        ut_path = os.path.join(self.repo.path, "blyat")  # untracked
        with open(del_path, "w") as f:
            f.write("origstuff")
        with open(mod_path, "w") as f:
            f.write("origstuff")
        with open(us_path, "w") as f:
            f.write("origstuff")
        porcelain.add(repo=self.repo.path, paths=[del_path, mod_path, us_path])
        porcelain.commit(
            repo=self.repo.path,
            message=b"test status",
            author=b"author <email>",
            committer=b"committer <email>",
        )
        porcelain.remove(self.repo.path, [del_path])
        with open(add_path, "w") as f:
            f.write("origstuff")
        with open(mod_path, "w") as f:
            f.write("more_origstuff")
        with open(us_path, "w") as f:
            f.write("more_origstuff")
        porcelain.add(repo=self.repo.path, paths=[add_path, mod_path])
        # Change us_path again AFTER staging, so it shows up as unstaged.
        with open(us_path, "w") as f:
            f.write("\norigstuff")
        with open(ut_path, "w") as f:
            f.write("origstuff")
        results = porcelain.status(self.repo.path)
        self.assertDictEqual(
            {"add": [b"baz"], "delete": [b"foo"], "modify": [b"bar"]},
            results.staged,
        )
        self.assertListEqual(results.unstaged, [b"blye"])
        # untracked_files="no" suppresses the untracked listing entirely.
        results_no_untracked = porcelain.status(self.repo.path, untracked_files="no")
        self.assertListEqual(results_no_untracked.untracked, [])
+
+ def test_status_wrong_untracked_files_value(self):
+ with self.assertRaises(ValueError):
+ porcelain.status(self.repo.path, untracked_files="antani")
+
+ def test_status_untracked_path(self):
+ untracked_dir = os.path.join(self.repo_path, "untracked_dir")
+ os.mkdir(untracked_dir)
+ untracked_file = os.path.join(untracked_dir, "untracked_file")
+ with open(untracked_file, "w") as fh:
+ fh.write("untracked")
+
+ _, _, untracked = porcelain.status(self.repo.path, untracked_files="all")
+ self.assertEqual(untracked, ["untracked_dir/untracked_file"])
+
    def test_status_crlf_mismatch(self):
        """A CRLF/LF-only difference is reported as unstaged when autocrlf
        is not configured."""
        # First make a commit as if the file has been added on a Linux system
        # or with core.autocrlf=True
        file_path = os.path.join(self.repo.path, "crlf")
        with open(file_path, "wb") as f:
            f.write(b"line1\nline2")
        porcelain.add(repo=self.repo.path, paths=[file_path])
        porcelain.commit(
            repo=self.repo.path,
            message=b"test status",
            author=b"author <email>",
            committer=b"committer <email>",
        )

        # Then update the file as if it was created by CGit on a Windows
        # system with core.autocrlf=true
        with open(file_path, "wb") as f:
            f.write(b"line1\r\nline2")

        results = porcelain.status(self.repo)
        self.assertDictEqual({"add": [], "delete": [], "modify": []}, results.staged)
        self.assertListEqual(results.unstaged, [b"crlf"])
        self.assertListEqual(results.untracked, [])
+
    def test_status_autocrlf_true(self):
        """With core.autocrlf=true a CRLF/LF-only difference is invisible
        to status."""
        # First make a commit as if the file has been added on a Linux system
        # or with core.autocrlf=True
        file_path = os.path.join(self.repo.path, "crlf")
        with open(file_path, "wb") as f:
            f.write(b"line1\nline2")
        porcelain.add(repo=self.repo.path, paths=[file_path])
        porcelain.commit(
            repo=self.repo.path,
            message=b"test status",
            author=b"author <email>",
            committer=b"committer <email>",
        )

        # Then update the file as if it was created by CGit on a Windows
        # system with core.autocrlf=true
        with open(file_path, "wb") as f:
            f.write(b"line1\r\nline2")

        # TODO: It should be set automatically by looking at the configuration
        c = self.repo.get_config()
        c.set("core", "autocrlf", True)
        c.write_to_path()

        results = porcelain.status(self.repo)
        self.assertDictEqual({"add": [], "delete": [], "modify": []}, results.staged)
        self.assertListEqual(results.unstaged, [])
        self.assertListEqual(results.untracked, [])
+
    def test_status_autocrlf_input(self):
        """With core.autocrlf=input a CRLF file stages cleanly and shows no
        spurious unstaged or untracked entries."""
        # Commit existing file with CRLF
        file_path = os.path.join(self.repo.path, "crlf-exists")
        with open(file_path, "wb") as f:
            f.write(b"line1\r\nline2")
        porcelain.add(repo=self.repo.path, paths=[file_path])
        porcelain.commit(
            repo=self.repo.path,
            message=b"test status",
            author=b"author <email>",
            committer=b"committer <email>",
        )

        c = self.repo.get_config()
        c.set("core", "autocrlf", "input")
        c.write_to_path()

        # Add new (untracked) file
        file_path = os.path.join(self.repo.path, "crlf-new")
        with open(file_path, "wb") as f:
            f.write(b"line1\r\nline2")
        porcelain.add(repo=self.repo.path, paths=[file_path])

        results = porcelain.status(self.repo)
        self.assertDictEqual(
            {"add": [b"crlf-new"], "delete": [], "modify": []}, results.staged
        )
        self.assertListEqual(results.unstaged, [])
        self.assertListEqual(results.untracked, [])
+
+ def test_get_tree_changes_add(self):
+ """Unit test for get_tree_changes add."""
+ # Make a dummy file, stage
+ filename = "bar"
+ fullpath = os.path.join(self.repo.path, filename)
+ with open(fullpath, "w") as f:
+ f.write("stuff")
+ porcelain.add(repo=self.repo.path, paths=fullpath)
+ porcelain.commit(
+ repo=self.repo.path,
+ message=b"test status",
+ author=b"author <email>",
+ committer=b"committer <email>",
+ )
+
+ filename = "foo"
+ fullpath = os.path.join(self.repo.path, filename)
+ with open(fullpath, "w") as f:
+ f.write("stuff")
+ porcelain.add(repo=self.repo.path, paths=fullpath)
+ changes = porcelain.get_tree_changes(self.repo.path)
+
+ self.assertEqual(changes["add"][0], filename.encode("ascii"))
+ self.assertEqual(len(changes["add"]), 1)
+ self.assertEqual(len(changes["modify"]), 0)
+ self.assertEqual(len(changes["delete"]), 0)
+
+ def test_get_tree_changes_modify(self):
+ """Unit test for get_tree_changes modify."""
+ # Make a dummy file, stage, commit, modify
+ filename = "foo"
+ fullpath = os.path.join(self.repo.path, filename)
+ with open(fullpath, "w") as f:
+ f.write("stuff")
+ porcelain.add(repo=self.repo.path, paths=fullpath)
+ porcelain.commit(
+ repo=self.repo.path,
+ message=b"test status",
+ author=b"author <email>",
+ committer=b"committer <email>",
+ )
+ with open(fullpath, "w") as f:
+ f.write("otherstuff")
+ porcelain.add(repo=self.repo.path, paths=fullpath)
+ changes = porcelain.get_tree_changes(self.repo.path)
+
+ self.assertEqual(changes["modify"][0], filename.encode("ascii"))
+ self.assertEqual(len(changes["add"]), 0)
+ self.assertEqual(len(changes["modify"]), 1)
+ self.assertEqual(len(changes["delete"]), 0)
+
    def test_get_tree_changes_delete(self):
        """Unit test for get_tree_changes delete."""
        # Make a dummy file, stage, commit, remove
        filename = "foo"
        fullpath = os.path.join(self.repo.path, filename)
        with open(fullpath, "w") as f:
            f.write("stuff")
        porcelain.add(repo=self.repo.path, paths=fullpath)
        porcelain.commit(
            repo=self.repo.path,
            message=b"test status",
            author=b"author <email>",
            committer=b"committer <email>",
        )
        # porcelain.remove is called with a path relative to the repository,
        # so temporarily chdir into the repo; always restore the old cwd.
        cwd = os.getcwd()
        try:
            os.chdir(self.repo.path)
            porcelain.remove(repo=self.repo.path, paths=[filename])
        finally:
            os.chdir(cwd)
        changes = porcelain.get_tree_changes(self.repo.path)

        # Only the delete bucket should contain the removed file.
        self.assertEqual(changes["delete"][0], filename.encode("ascii"))
        self.assertEqual(len(changes["add"]), 0)
        self.assertEqual(len(changes["modify"]), 0)
        self.assertEqual(len(changes["delete"]), 1)
+
+ def test_get_untracked_paths(self):
+ # Set up one ignored file, one regular file, the .gitignore itself,
+ # and a symlink pointing outside the repository.
+ with open(os.path.join(self.repo.path, ".gitignore"), "w") as f:
+ f.write("ignored\n")
+ with open(os.path.join(self.repo.path, "ignored"), "w") as f:
+ f.write("blah\n")
+ with open(os.path.join(self.repo.path, "notignored"), "w") as f:
+ f.write("blah\n")
+ # NOTE(review): os.symlink requires POSIX (or symlink privileges on
+ # Windows); the link target likely does not exist, but it must still
+ # be reported as untracked.
+ os.symlink(
+ os.path.join(self.repo.path, os.pardir, "external_target"),
+ os.path.join(self.repo.path, "link"),
+ )
+ # get_untracked_paths() itself does not filter ignored entries...
+ self.assertEqual(
+ {"ignored", "notignored", ".gitignore", "link"},
+ set(
+ porcelain.get_untracked_paths(
+ self.repo.path, self.repo.path, self.repo.open_index()
+ )
+ ),
+ )
+ # ...but status() does, unless ignored=True is passed.
+ self.assertEqual(
+ {".gitignore", "notignored", "link"},
+ set(porcelain.status(self.repo).untracked),
+ )
+ self.assertEqual(
+ {".gitignore", "notignored", "ignored", "link"},
+ set(porcelain.status(self.repo, ignored=True).untracked),
+ )
+
+ def test_get_untracked_paths_subrepo(self):
+ # A nested git repository should be reported by the outer repo as a
+ # single "nested/" directory entry, not file-by-file.
+ with open(os.path.join(self.repo.path, ".gitignore"), "w") as f:
+ f.write("nested/\n")
+ with open(os.path.join(self.repo.path, "notignored"), "w") as f:
+ f.write("blah\n")
+
+ subrepo = Repo.init(os.path.join(self.repo.path, "nested"), mkdir=True)
+ with open(os.path.join(subrepo.path, "ignored"), "w") as f:
+ f.write("bleep\n")
+ with open(os.path.join(subrepo.path, "with"), "w") as f:
+ f.write("bloop\n")
+ with open(os.path.join(subrepo.path, "manager"), "w") as f:
+ f.write("blop\n")
+
+ # From the outer repo, the nested repo collapses to "nested/".
+ self.assertEqual(
+ {".gitignore", "notignored", os.path.join("nested", "")},
+ set(
+ porcelain.get_untracked_paths(
+ self.repo.path, self.repo.path, self.repo.open_index()
+ )
+ ),
+ )
+ # With exclude_ignored=True the "nested/" entry drops out, since it
+ # is matched by the outer .gitignore.
+ self.assertEqual(
+ {".gitignore", "notignored"},
+ set(
+ porcelain.get_untracked_paths(
+ self.repo.path,
+ self.repo.path,
+ self.repo.open_index(),
+ exclude_ignored=True,
+ )
+ ),
+ )
+ # Queried as its own repo, the nested repo lists its own files.
+ self.assertEqual(
+ {"ignored", "with", "manager"},
+ set(
+ porcelain.get_untracked_paths(
+ subrepo.path, subrepo.path, subrepo.open_index()
+ )
+ ),
+ )
+ # Walking the subrepo path against the outer repo's index and base
+ # path yields nothing.
+ self.assertEqual(
+ set(),
+ set(
+ porcelain.get_untracked_paths(
+ subrepo.path,
+ self.repo.path,
+ self.repo.open_index(),
+ )
+ ),
+ )
+ # Walking the outer repo with the subrepo as base prefixes results
+ # with "nested/".
+ self.assertEqual(
+ {
+ os.path.join("nested", "ignored"),
+ os.path.join("nested", "with"),
+ os.path.join("nested", "manager"),
+ },
+ set(
+ porcelain.get_untracked_paths(
+ self.repo.path,
+ subrepo.path,
+ self.repo.open_index(),
+ )
+ ),
+ )
+
+ def test_get_untracked_paths_subdir(self):
+ # Ignore an entire subdirectory plus a top-level file name; an
+ # untracked-but-ignored directory is reported as "subdir/".
+ with open(os.path.join(self.repo.path, ".gitignore"), "w") as f:
+ f.write("subdir/\nignored")
+ with open(os.path.join(self.repo.path, "notignored"), "w") as f:
+ f.write("blah\n")
+ os.mkdir(os.path.join(self.repo.path, "subdir"))
+ with open(os.path.join(self.repo.path, "ignored"), "w") as f:
+ f.write("foo")
+ with open(os.path.join(self.repo.path, "subdir", "ignored"), "w") as f:
+ f.write("foo")
+
+ # Without filtering, ignored entries are still included.
+ self.assertEqual(
+ {
+ ".gitignore",
+ "notignored",
+ "ignored",
+ os.path.join("subdir", ""),
+ },
+ set(
+ porcelain.get_untracked_paths(
+ self.repo.path,
+ self.repo.path,
+ self.repo.open_index(),
+ )
+ ),
+ )
+ # exclude_ignored=True removes both the ignored file and "subdir/".
+ self.assertEqual(
+ {".gitignore", "notignored"},
+ set(
+ porcelain.get_untracked_paths(
+ self.repo.path,
+ self.repo.path,
+ self.repo.open_index(),
+ exclude_ignored=True,
+ )
+ ),
+ )
+
+ def test_get_untracked_paths_invalid_untracked_files(self):
+ # An unrecognised untracked_files mode must raise ValueError; the
+ # generator is wrapped in list() to force evaluation.
+ with self.assertRaises(ValueError):
+ list(
+ porcelain.get_untracked_paths(
+ self.repo.path,
+ self.repo.path,
+ self.repo.open_index(),
+ untracked_files="invalid_value",
+ )
+ )
+
+ def test_get_untracked_paths_normal(self):
+ # The "normal" untracked-files mode is not implemented yet for
+ # status(); it must raise NotImplementedError rather than misbehave.
+ with self.assertRaises(NotImplementedError):
+ _, _, _ = porcelain.status(repo=self.repo.path, untracked_files="normal")
+
+
+# TODO(jelmer): Add test for dulwich.porcelain.daemon
+
+
+class UploadPackTests(PorcelainTestCase):
+ """Tests for upload_pack."""
+
+ def test_upload_pack(self):
+ # Feed only a flush-pkt ("0000") as the client request; the server
+ # should answer with a flush-pkt and exit successfully.
+ outf = BytesIO()
+ exitcode = porcelain.upload_pack(self.repo.path, BytesIO(b"0000"), outf)
+ outlines = outf.getvalue().splitlines()
+ self.assertEqual([b"0000"], outlines)
+ self.assertEqual(0, exitcode)
+
+
+class ReceivePackTests(PorcelainTestCase):
+ """Tests for receive_pack."""
+
+ def test_receive_pack(self):
+ filename = "foo"
+ fullpath = os.path.join(self.repo.path, filename)
+ with open(fullpath, "w") as f:
+ f.write("stuff")
+ porcelain.add(repo=self.repo.path, paths=fullpath)
+ # Fixed author/committer timestamps and timezones make the commit
+ # SHA deterministic, so the expected ref advertisement below can be
+ # spelled out literally.
+ self.repo.do_commit(
+ message=b"test status",
+ author=b"author <email>",
+ committer=b"committer <email>",
+ author_timestamp=1402354300,
+ commit_timestamp=1402354300,
+ author_timezone=0,
+ commit_timezone=0,
+ )
+ outf = BytesIO()
+ exitcode = porcelain.receive_pack(self.repo.path, BytesIO(b"0000"), outf)
+ outlines = outf.getvalue().splitlines()
+ # Expect the capability-bearing HEAD line, the master ref line, and
+ # a terminating flush-pkt.
+ self.assertEqual(
+ [
+ b"0091319b56ce3aee2d489f759736a79cc552c9bb86d9 HEAD\x00 report-status "
+ b"delete-refs quiet ofs-delta side-band-64k "
+ b"no-done symref=HEAD:refs/heads/master",
+ b"003f319b56ce3aee2d489f759736a79cc552c9bb86d9 refs/heads/master",
+ b"0000",
+ ],
+ outlines,
+ )
+ self.assertEqual(0, exitcode)
+
+
+class BranchListTests(PorcelainTestCase):
+ # branch_list() on a freshly-initialised repo (no commits) is empty.
+ def test_standard(self):
+ self.assertEqual(set(), set(porcelain.branch_list(self.repo)))
+
+ def test_new_branch(self):
+ # After one commit and branch_create(), both master and the new
+ # branch are listed.
+ [c1] = build_commit_graph(self.repo.object_store, [[1]])
+ self.repo[b"HEAD"] = c1.id
+ porcelain.branch_create(self.repo, b"foo")
+ self.assertEqual({b"master", b"foo"}, set(porcelain.branch_list(self.repo)))
+
+
+class BranchCreateTests(PorcelainTestCase):
+ def test_branch_exists(self):
+ # Creating a branch that already exists raises porcelain.Error
+ # unless force=True is given.
+ [c1] = build_commit_graph(self.repo.object_store, [[1]])
+ self.repo[b"HEAD"] = c1.id
+ porcelain.branch_create(self.repo, b"foo")
+ self.assertRaises(porcelain.Error, porcelain.branch_create, self.repo, b"foo")
+ porcelain.branch_create(self.repo, b"foo", force=True)
+
+ def test_new_branch(self):
+ # A newly created branch shows up alongside master.
+ [c1] = build_commit_graph(self.repo.object_store, [[1]])
+ self.repo[b"HEAD"] = c1.id
+ porcelain.branch_create(self.repo, b"foo")
+ self.assertEqual({b"master", b"foo"}, set(porcelain.branch_list(self.repo)))
+
+
+class BranchDeleteTests(PorcelainTestCase):
+ # branch_delete() must accept the branch name both as bytes and as str.
+ def test_simple(self):
+ [c1] = build_commit_graph(self.repo.object_store, [[1]])
+ self.repo[b"HEAD"] = c1.id
+ porcelain.branch_create(self.repo, b"foo")
+ self.assertIn(b"foo", porcelain.branch_list(self.repo))
+ porcelain.branch_delete(self.repo, b"foo")
+ self.assertNotIn(b"foo", porcelain.branch_list(self.repo))
+
+ def test_simple_unicode(self):
+ # Same flow as test_simple, but passing str names; branch_list still
+ # reports bytes.
+ [c1] = build_commit_graph(self.repo.object_store, [[1]])
+ self.repo[b"HEAD"] = c1.id
+ porcelain.branch_create(self.repo, "foo")
+ self.assertIn(b"foo", porcelain.branch_list(self.repo))
+ porcelain.branch_delete(self.repo, "foo")
+ self.assertNotIn(b"foo", porcelain.branch_list(self.repo))
+
+
+class FetchTests(PorcelainTestCase):
+ # Exercises porcelain.fetch() against a local clone: commit upstream,
+ # clone, commit again upstream, fetch, then verify the clone sees the
+ # new commit and its remote-tracking refs are correct.
+ def test_simple(self):
+ outstream = BytesIO()
+ errstream = BytesIO()
+
+ # create a file for initial commit
+ handle, fullpath = tempfile.mkstemp(dir=self.repo.path)
+ os.close(handle)
+ porcelain.add(repo=self.repo.path, paths=fullpath)
+ porcelain.commit(
+ repo=self.repo.path,
+ message=b"test",
+ author=b"test <email>",
+ committer=b"test <email>",
+ )
+
+ # Setup target repo
+ target_path = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, target_path)
+ target_repo = porcelain.clone(
+ self.repo.path, target=target_path, errstream=errstream
+ )
+
+ # create a second file to be pushed
+ handle, fullpath = tempfile.mkstemp(dir=self.repo.path)
+ os.close(handle)
+ porcelain.add(repo=self.repo.path, paths=fullpath)
+ porcelain.commit(
+ repo=self.repo.path,
+ message=b"test2",
+ author=b"test2 <email>",
+ committer=b"test2 <email>",
+ )
+
+ # The clone predates the second commit, so it must not have it yet.
+ self.assertNotIn(self.repo[b"HEAD"].id, target_repo)
+ target_repo.close()
+
+ # Fetch changes into the cloned repo
+ porcelain.fetch(target_path, "origin", outstream=outstream, errstream=errstream)
+
+ # Assert that fetch updated the local image of the remote
+ self.assert_correct_remote_refs(target_repo.get_refs(), self.repo.get_refs())
+
+ # Check the target repo for pushed changes
+ with Repo(target_path) as r:
+ self.assertIn(self.repo[b"HEAD"].id, r)
+
+ def test_with_remote_name(self):
+ # Same as test_simple, but the remote URL is configured explicitly
+ # under [remote "origin"] in the clone's config before fetching.
+ remote_name = "origin"
+ outstream = BytesIO()
+ errstream = BytesIO()
+
+ # create a file for initial commit
+ handle, fullpath = tempfile.mkstemp(dir=self.repo.path)
+ os.close(handle)
+ porcelain.add(repo=self.repo.path, paths=fullpath)
+ porcelain.commit(
+ repo=self.repo.path,
+ message=b"test",
+ author=b"test <email>",
+ committer=b"test <email>",
+ )
+
+ # Setup target repo
+ target_path = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, target_path)
+ target_repo = porcelain.clone(
+ self.repo.path, target=target_path, errstream=errstream
+ )
+
+ # Capture current refs
+ target_refs = target_repo.get_refs()
+
+ # create a second file to be pushed
+ handle, fullpath = tempfile.mkstemp(dir=self.repo.path)
+ os.close(handle)
+ porcelain.add(repo=self.repo.path, paths=fullpath)
+ porcelain.commit(
+ repo=self.repo.path,
+ message=b"test2",
+ author=b"test2 <email>",
+ committer=b"test2 <email>",
+ )
+
+ self.assertNotIn(self.repo[b"HEAD"].id, target_repo)
+
+ target_config = target_repo.get_config()
+ target_config.set(
+ (b"remote", remote_name.encode()), b"url", self.repo.path.encode()
+ )
+ target_repo.close()
+
+ # Fetch changes into the cloned repo
+ porcelain.fetch(
+ target_path, remote_name, outstream=outstream, errstream=errstream
+ )
+
+ # Assert that fetch updated the local image of the remote
+ self.assert_correct_remote_refs(target_repo.get_refs(), self.repo.get_refs())
+
+ # Check the target repo for pushed changes, as well as updates
+ # for the refs
+ with Repo(target_path) as r:
+ self.assertIn(self.repo[b"HEAD"].id, r)
+ self.assertNotEqual(self.repo.get_refs(), target_refs)
+
+ def assert_correct_remote_refs(
+ self, local_refs, remote_refs, remote_name=b"origin"
+ ):
+ """Assert that known remote refs corresponds to actual remote refs."""
+ local_ref_prefix = b"refs/heads"
+ remote_ref_prefix = b"refs/remotes/" + remote_name
+
+ # refs/remotes/<name>/<branch> entries in the local repo...
+ locally_known_remote_refs = {
+ k[len(remote_ref_prefix) + 1 :]: v
+ for k, v in local_refs.items()
+ if k.startswith(remote_ref_prefix)
+ }
+
+ # ...must match refs/heads/<branch> entries in the remote repo.
+ normalized_remote_refs = {
+ k[len(local_ref_prefix) + 1 :]: v
+ for k, v in remote_refs.items()
+ if k.startswith(local_ref_prefix)
+ }
+ # Only compare HEAD when both sides actually track it.
+ if b"HEAD" in locally_known_remote_refs and b"HEAD" in remote_refs:
+ normalized_remote_refs[b"HEAD"] = remote_refs[b"HEAD"]
+
+ self.assertEqual(locally_known_remote_refs, normalized_remote_refs)
+
+
+class RepackTests(PorcelainTestCase):
+ # Smoke tests: repack() must not raise on an empty repo or on a repo
+ # with a single staged (but uncommitted) file.
+ def test_empty(self):
+ porcelain.repack(self.repo)
+
+ def test_simple(self):
+ handle, fullpath = tempfile.mkstemp(dir=self.repo.path)
+ os.close(handle)
+ porcelain.add(repo=self.repo.path, paths=fullpath)
+ porcelain.repack(self.repo)
+
+
+class LsTreeTests(PorcelainTestCase):
+ # ls_tree() writes "mode type sha\tname" lines to a text stream.
+ def test_empty(self):
+ porcelain.commit(
+ repo=self.repo.path,
+ message=b"test status",
+ author=b"author <email>",
+ committer=b"committer <email>",
+ )
+
+ # An empty tree produces no output.
+ f = StringIO()
+ porcelain.ls_tree(self.repo, b"HEAD", outstream=f)
+ self.assertEqual(f.getvalue(), "")
+
+ def test_simple(self):
+ # Commit a dummy file then modify it
+ fullpath = os.path.join(self.repo.path, "foo")
+ with open(fullpath, "w") as f:
+ f.write("origstuff")
+
+ porcelain.add(repo=self.repo.path, paths=[fullpath])
+ porcelain.commit(
+ repo=self.repo.path,
+ message=b"test status",
+ author=b"author <email>",
+ committer=b"committer <email>",
+ )
+
+ f = StringIO()
+ porcelain.ls_tree(self.repo, b"HEAD", outstream=f)
+ # Blob SHA is stable because the file contents are fixed.
+ self.assertEqual(
+ f.getvalue(),
+ "100644 blob 8b82634d7eae019850bb883f06abf428c58bc9aa\tfoo\n",
+ )
+
+ def test_recursive(self):
+ # Create a directory then write a dummy file in it
+ dirpath = os.path.join(self.repo.path, "adir")
+ filepath = os.path.join(dirpath, "afile")
+ os.mkdir(dirpath)
+ with open(filepath, "w") as f:
+ f.write("origstuff")
+ porcelain.add(repo=self.repo.path, paths=[filepath])
+ porcelain.commit(
+ repo=self.repo.path,
+ message=b"test status",
+ author=b"author <email>",
+ committer=b"committer <email>",
+ )
+ # Non-recursive: only the tree entry for the directory.
+ f = StringIO()
+ porcelain.ls_tree(self.repo, b"HEAD", outstream=f)
+ self.assertEqual(
+ f.getvalue(),
+ "40000 tree b145cc69a5e17693e24d8a7be0016ed8075de66d\tadir\n",
+ )
+ # Recursive: tree entry followed by the nested blob.
+ f = StringIO()
+ porcelain.ls_tree(self.repo, b"HEAD", outstream=f, recursive=True)
+ self.assertEqual(
+ f.getvalue(),
+ "40000 tree b145cc69a5e17693e24d8a7be0016ed8075de66d\tadir\n"
+ "100644 blob 8b82634d7eae019850bb883f06abf428c58bc9aa\tadir"
+ "/afile\n",
+ )
+
+
+class LsRemoteTests(PorcelainTestCase):
+ # ls_remote() returns a ref-name -> sha mapping for a repo path.
+ def test_empty(self):
+ self.assertEqual({}, porcelain.ls_remote(self.repo.path))
+
+ def test_some(self):
+ # After one commit, both HEAD and refs/heads/master point at it.
+ cid = porcelain.commit(
+ repo=self.repo.path,
+ message=b"test status",
+ author=b"author <email>",
+ committer=b"committer <email>",
+ )
+
+ self.assertEqual(
+ {b"refs/heads/master": cid, b"HEAD": cid},
+ porcelain.ls_remote(self.repo.path),
+ )
+
+
+class LsFilesTests(PorcelainTestCase):
+ # ls_files() yields index entries as bytes paths.
+ def test_empty(self):
+ self.assertEqual([], list(porcelain.ls_files(self.repo)))
+
+ def test_simple(self):
+ # Staging a file (no commit needed) is enough to list it.
+ fullpath = os.path.join(self.repo.path, "foo")
+ with open(fullpath, "w") as f:
+ f.write("origstuff")
+
+ porcelain.add(repo=self.repo.path, paths=[fullpath])
+ self.assertEqual([b"foo"], list(porcelain.ls_files(self.repo)))
+
+
+class RemoteAddTests(PorcelainTestCase):
+ def test_new(self):
+ # remote_add() writes the URL to [remote "<name>"] in the config.
+ porcelain.remote_add(self.repo, "jelmer", "git://jelmer.uk/code/dulwich")
+ c = self.repo.get_config()
+ self.assertEqual(
+ c.get((b"remote", b"jelmer"), b"url"),
+ b"git://jelmer.uk/code/dulwich",
+ )
+
+ def test_exists(self):
+ # Adding the same remote twice raises RemoteExists.
+ porcelain.remote_add(self.repo, "jelmer", "git://jelmer.uk/code/dulwich")
+ self.assertRaises(
+ porcelain.RemoteExists,
+ porcelain.remote_add,
+ self.repo,
+ "jelmer",
+ "git://jelmer.uk/code/dulwich",
+ )
+
+
+class RemoteRemoveTests(PorcelainTestCase):
+ def test_remove(self):
+ # Add a remote, remove it, then verify both a second removal and a
+ # config lookup for its URL raise KeyError.
+ porcelain.remote_add(self.repo, "jelmer", "git://jelmer.uk/code/dulwich")
+ c = self.repo.get_config()
+ self.assertEqual(
+ c.get((b"remote", b"jelmer"), b"url"),
+ b"git://jelmer.uk/code/dulwich",
+ )
+ porcelain.remote_remove(self.repo, "jelmer")
+ self.assertRaises(KeyError, porcelain.remote_remove, self.repo, "jelmer")
+ # Re-read the config: the section must be gone.
+ c = self.repo.get_config()
+ self.assertRaises(KeyError, c.get, (b"remote", b"jelmer"), b"url")
+
+
+class CheckIgnoreTests(PorcelainTestCase):
+ # check_ignore() reports which of the given paths are ignored.
+ def test_check_ignored(self):
+ with open(os.path.join(self.repo.path, ".gitignore"), "w") as f:
+ f.write("foo")
+ foo_path = os.path.join(self.repo.path, "foo")
+ with open(foo_path, "w") as f:
+ f.write("BAR")
+ bar_path = os.path.join(self.repo.path, "bar")
+ with open(bar_path, "w") as f:
+ f.write("BAR")
+ self.assertEqual(["foo"], list(porcelain.check_ignore(self.repo, [foo_path])))
+ self.assertEqual([], list(porcelain.check_ignore(self.repo, [bar_path])))
+
+ def test_check_added_abs(self):
+ # A path that is already in the index is not reported as ignored
+ # unless no_index=True is passed.
+ path = os.path.join(self.repo.path, "foo")
+ with open(path, "w") as f:
+ f.write("BAR")
+ self.repo.stage(["foo"])
+ with open(os.path.join(self.repo.path, ".gitignore"), "w") as f:
+ f.write("foo\n")
+ self.assertEqual([], list(porcelain.check_ignore(self.repo, [path])))
+ self.assertEqual(
+ ["foo"],
+ list(porcelain.check_ignore(self.repo, [path], no_index=True)),
+ )
+
+ def test_check_added_rel(self):
+ # Same as test_check_added_abs, but using a relative path ("../foo")
+ # from a subdirectory of the work tree; cwd is restored afterwards.
+ with open(os.path.join(self.repo.path, "foo"), "w") as f:
+ f.write("BAR")
+ self.repo.stage(["foo"])
+ with open(os.path.join(self.repo.path, ".gitignore"), "w") as f:
+ f.write("foo\n")
+ cwd = os.getcwd()
+ os.mkdir(os.path.join(self.repo.path, "bar"))
+ os.chdir(os.path.join(self.repo.path, "bar"))
+ try:
+ self.assertEqual(list(porcelain.check_ignore(self.repo, ["../foo"])), [])
+ self.assertEqual(
+ ["../foo"],
+ list(porcelain.check_ignore(self.repo, ["../foo"], no_index=True)),
+ )
+ finally:
+ os.chdir(cwd)
+
+
+class UpdateHeadTests(PorcelainTestCase):
+ # update_head() repositions HEAD: symbolic ref for branches, raw sha
+ # when detached=True, and optionally creates a new branch first.
+ def test_set_to_branch(self):
+ [c1] = build_commit_graph(self.repo.object_store, [[1]])
+ self.repo.refs[b"refs/heads/blah"] = c1.id
+ porcelain.update_head(self.repo, "blah")
+ self.assertEqual(c1.id, self.repo.head())
+ # HEAD is a symbolic ref to the branch, not a raw sha.
+ self.assertEqual(b"ref: refs/heads/blah", self.repo.refs.read_ref(b"HEAD"))
+
+ def test_set_to_branch_detached(self):
+ [c1] = build_commit_graph(self.repo.object_store, [[1]])
+ self.repo.refs[b"refs/heads/blah"] = c1.id
+ porcelain.update_head(self.repo, "blah", detached=True)
+ self.assertEqual(c1.id, self.repo.head())
+ # Detached: HEAD stores the commit sha directly.
+ self.assertEqual(c1.id, self.repo.refs.read_ref(b"HEAD"))
+
+ def test_set_to_commit_detached(self):
+ [c1] = build_commit_graph(self.repo.object_store, [[1]])
+ self.repo.refs[b"refs/heads/blah"] = c1.id
+ porcelain.update_head(self.repo, c1.id, detached=True)
+ self.assertEqual(c1.id, self.repo.head())
+ self.assertEqual(c1.id, self.repo.refs.read_ref(b"HEAD"))
+
+ def test_set_new_branch(self):
+ # new_branch="bar" creates the branch and points HEAD at it.
+ [c1] = build_commit_graph(self.repo.object_store, [[1]])
+ self.repo.refs[b"refs/heads/blah"] = c1.id
+ porcelain.update_head(self.repo, "blah", new_branch="bar")
+ self.assertEqual(c1.id, self.repo.head())
+ self.assertEqual(b"ref: refs/heads/bar", self.repo.refs.read_ref(b"HEAD"))
+
+
+class MailmapTests(PorcelainTestCase):
+ # check_mailmap() canonicalises an identity using the repo's .mailmap.
+ def test_no_mailmap(self):
+ # Without a .mailmap file the identity passes through unchanged.
+ self.assertEqual(
+ b"Jelmer Vernooij <jelmer@samba.org>",
+ porcelain.check_mailmap(self.repo, b"Jelmer Vernooij <jelmer@samba.org>"),
+ )
+
+ def test_mailmap_lookup(self):
+ # With a single canonical entry, a different address for the same
+ # name maps to the canonical one.
+ with open(os.path.join(self.repo.path, ".mailmap"), "wb") as f:
+ f.write(
+ b"""\
+Jelmer Vernooij <jelmer@debian.org>
+"""
+ )
+ self.assertEqual(
+ b"Jelmer Vernooij <jelmer@debian.org>",
+ porcelain.check_mailmap(self.repo, b"Jelmer Vernooij <jelmer@samba.org>"),
+ )
+
+
+class FsckTests(PorcelainTestCase):
+ # fsck() yields (sha, error) pairs for problems found in the store.
+ def test_none(self):
+ self.assertEqual([], list(porcelain.fsck(self.repo)))
+
+ def test_git_dir(self):
+ # A tree entry named ".git" is invalid and must be flagged.
+ obj = Tree()
+ a = Blob()
+ a.data = b"foo"
+ obj.add(b".git", 0o100644, a.id)
+ self.repo.object_store.add_objects([(a, None), (obj, None)])
+ self.assertEqual(
+ [(obj.id, "invalid name .git")],
+ [(sha, str(e)) for (sha, e) in porcelain.fsck(self.repo)],
+ )
+
+
+class DescribeTests(PorcelainTestCase):
+ # describe() mirrors `git describe`: "g<sha>" with no tags, the tag
+ # name when HEAD is tagged, and "<tag>-<n>-g<sha>" past a tag.
+ def test_no_commits(self):
+ self.assertRaises(KeyError, porcelain.describe, self.repo.path)
+
+ def test_single_commit(self):
+ fullpath = os.path.join(self.repo.path, "foo")
+ with open(fullpath, "w") as f:
+ f.write("BAR")
+ porcelain.add(repo=self.repo.path, paths=[fullpath])
+ sha = porcelain.commit(
+ self.repo.path,
+ message=b"Some message",
+ author=b"Joe <joe@example.com>",
+ committer=b"Bob <bob@example.com>",
+ )
+ # No tags: describe falls back to "g" + abbreviated commit sha.
+ self.assertEqual(
+ "g{}".format(sha[:7].decode("ascii")),
+ porcelain.describe(self.repo.path),
+ )
+
+ def test_tag(self):
+ fullpath = os.path.join(self.repo.path, "foo")
+ with open(fullpath, "w") as f:
+ f.write("BAR")
+ porcelain.add(repo=self.repo.path, paths=[fullpath])
+ porcelain.commit(
+ self.repo.path,
+ message=b"Some message",
+ author=b"Joe <joe@example.com>",
+ committer=b"Bob <bob@example.com>",
+ )
+ porcelain.tag_create(
+ self.repo.path,
+ b"tryme",
+ b"foo <foo@bar.com>",
+ b"bar",
+ annotated=True,
+ )
+ # HEAD is exactly at the annotated tag.
+ self.assertEqual("tryme", porcelain.describe(self.repo.path))
+
+ def test_tag_and_commit(self):
+ fullpath = os.path.join(self.repo.path, "foo")
+ with open(fullpath, "w") as f:
+ f.write("BAR")
+ porcelain.add(repo=self.repo.path, paths=[fullpath])
+ porcelain.commit(
+ self.repo.path,
+ message=b"Some message",
+ author=b"Joe <joe@example.com>",
+ committer=b"Bob <bob@example.com>",
+ )
+ porcelain.tag_create(
+ self.repo.path,
+ b"tryme",
+ b"foo <foo@bar.com>",
+ b"bar",
+ annotated=True,
+ )
+ # One commit past the tag: "<tag>-1-g<abbrev-sha>".
+ with open(fullpath, "w") as f:
+ f.write("BAR2")
+ porcelain.add(repo=self.repo.path, paths=[fullpath])
+ sha = porcelain.commit(
+ self.repo.path,
+ message=b"Some message",
+ author=b"Joe <joe@example.com>",
+ committer=b"Bob <bob@example.com>",
+ )
+ self.assertEqual(
+ "tryme-1-g{}".format(sha[:7].decode("ascii")),
+ porcelain.describe(self.repo.path),
+ )
+
+ def test_tag_and_commit_full(self):
+ fullpath = os.path.join(self.repo.path, "foo")
+ with open(fullpath, "w") as f:
+ f.write("BAR")
+ porcelain.add(repo=self.repo.path, paths=[fullpath])
+ porcelain.commit(
+ self.repo.path,
+ message=b"Some message",
+ author=b"Joe <joe@example.com>",
+ committer=b"Bob <bob@example.com>",
+ )
+ porcelain.tag_create(
+ self.repo.path,
+ b"tryme",
+ b"foo <foo@bar.com>",
+ b"bar",
+ annotated=True,
+ )
+ with open(fullpath, "w") as f:
+ f.write("BAR2")
+ porcelain.add(repo=self.repo.path, paths=[fullpath])
+ sha = porcelain.commit(
+ self.repo.path,
+ message=b"Some message",
+ author=b"Joe <joe@example.com>",
+ committer=b"Bob <bob@example.com>",
+ )
+ # abbrev=40 uses the full 40-character sha in the suffix.
+ self.assertEqual(
+ "tryme-1-g{}".format(sha.decode("ascii")),
+ porcelain.describe(self.repo.path, abbrev=40),
+ )
+
+
+class PathToTreeTests(PorcelainTestCase):
+ # path_to_tree_path() converts filesystem paths (absolute or relative)
+ # into repo-relative tree paths as bytes.
+ def setUp(self):
+ super().setUp()
+ self.fp = os.path.join(self.test_dir, "bar")
+ with open(self.fp, "w") as f:
+ f.write("something")
+ # Run every test from inside test_dir; cwd restored via addCleanup.
+ oldcwd = os.getcwd()
+ self.addCleanup(os.chdir, oldcwd)
+ os.chdir(self.test_dir)
+
+ def test_path_to_tree_path_base(self):
+ self.assertEqual(b"bar", porcelain.path_to_tree_path(self.test_dir, self.fp))
+ self.assertEqual(b"bar", porcelain.path_to_tree_path(".", "./bar"))
+ self.assertEqual(b"bar", porcelain.path_to_tree_path(".", "bar"))
+ cwd = os.getcwd()
+ self.assertEqual(
+ b"bar", porcelain.path_to_tree_path(".", os.path.join(cwd, "bar"))
+ )
+ self.assertEqual(b"bar", porcelain.path_to_tree_path(cwd, "bar"))
+
+ def test_path_to_tree_path_syntax(self):
+ self.assertEqual(b"bar", porcelain.path_to_tree_path(".", "./bar"))
+
+ def test_path_to_tree_path_error(self):
+ # A path outside the repo root cannot be converted.
+ with self.assertRaises(ValueError):
+ with tempfile.TemporaryDirectory() as od:
+ porcelain.path_to_tree_path(od, self.fp)
+
+ def test_path_to_tree_path_rel(self):
+ # Mixed relative/absolute combinations from a nested directory must
+ # all resolve to the same tree path "bar/baz".
+ cwd = os.getcwd()
+ os.mkdir(os.path.join(self.repo.path, "foo"))
+ os.mkdir(os.path.join(self.repo.path, "foo/bar"))
+ try:
+ os.chdir(os.path.join(self.repo.path, "foo/bar"))
+ with open("baz", "w") as f:
+ f.write("contents")
+ self.assertEqual(b"bar/baz", porcelain.path_to_tree_path("..", "baz"))
+ self.assertEqual(
+ b"bar/baz",
+ porcelain.path_to_tree_path(
+ os.path.join(os.getcwd(), ".."),
+ os.path.join(os.getcwd(), "baz"),
+ ),
+ )
+ self.assertEqual(
+ b"bar/baz",
+ porcelain.path_to_tree_path("..", os.path.join(os.getcwd(), "baz")),
+ )
+ self.assertEqual(
+ b"bar/baz",
+ porcelain.path_to_tree_path(os.path.join(os.getcwd(), ".."), "baz"),
+ )
+ finally:
+ os.chdir(cwd)
+
+
+class GetObjectByPathTests(PorcelainTestCase):
+ # get_object_by_path() accepts both str and bytes paths and returns
+ # the blob at that path in HEAD's tree.
+ def test_simple(self):
+ fullpath = os.path.join(self.repo.path, "foo")
+ with open(fullpath, "w") as f:
+ f.write("BAR")
+ porcelain.add(repo=self.repo.path, paths=[fullpath])
+ porcelain.commit(
+ self.repo.path,
+ message=b"Some message",
+ author=b"Joe <joe@example.com>",
+ committer=b"Bob <bob@example.com>",
+ )
+ self.assertEqual(b"BAR", porcelain.get_object_by_path(self.repo, "foo").data)
+ self.assertEqual(b"BAR", porcelain.get_object_by_path(self.repo, b"foo").data)
+
+ def test_encoding(self):
+ # Same lookup when the commit carries an explicit encoding header.
+ fullpath = os.path.join(self.repo.path, "foo")
+ with open(fullpath, "w") as f:
+ f.write("BAR")
+ porcelain.add(repo=self.repo.path, paths=[fullpath])
+ porcelain.commit(
+ self.repo.path,
+ message=b"Some message",
+ author=b"Joe <joe@example.com>",
+ committer=b"Bob <bob@example.com>",
+ encoding=b"utf-8",
+ )
+ self.assertEqual(b"BAR", porcelain.get_object_by_path(self.repo, "foo").data)
+ self.assertEqual(b"BAR", porcelain.get_object_by_path(self.repo, b"foo").data)
+
+ def test_missing(self):
+ # Nonexistent path raises KeyError.
+ self.assertRaises(KeyError, porcelain.get_object_by_path, self.repo, "foo")
+
+
+class WriteTreeTests(PorcelainTestCase):
+ def test_simple(self):
+ # write_tree() serialises the current index as a tree object and
+ # returns its sha; fixed contents make the sha deterministic.
+ fullpath = os.path.join(self.repo.path, "foo")
+ with open(fullpath, "w") as f:
+ f.write("BAR")
+ porcelain.add(repo=self.repo.path, paths=[fullpath])
+ self.assertEqual(
+ b"d2092c8a9f311f0311083bf8d177f2ca0ab5b241",
+ porcelain.write_tree(self.repo),
+ )
+
+
+class ActiveBranchTests(PorcelainTestCase):
+ def test_simple(self):
+ # A fresh repo's active branch is "master" (returned as bytes).
+ self.assertEqual(b"master", porcelain.active_branch(self.repo))
+
+
+class FindUniqueAbbrevTests(PorcelainTestCase):
+ def test_simple(self):
+ # With only three commits, a 7-character prefix is already unique.
+ c1, c2, c3 = build_commit_graph(
+ self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
+ )
+ self.repo.refs[b"HEAD"] = c3.id
+ self.assertEqual(
+ c1.id.decode("ascii")[:7],
+ porcelain.find_unique_abbrev(self.repo.object_store, c1.id),
+ )
+
+
+class PackRefsTests(PorcelainTestCase):
+ # pack_refs() moves loose refs into packed-refs; by default only tags,
+ # with all=True also branch heads.
+ def test_all(self):
+ c1, c2, c3 = build_commit_graph(
+ self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
+ )
+ self.repo.refs[b"HEAD"] = c3.id
+ self.repo.refs[b"refs/heads/master"] = c2.id
+ self.repo.refs[b"refs/tags/foo"] = c1.id
+
+ porcelain.pack_refs(self.repo, all=True)
+
+ # Both the branch and the tag were packed; HEAD stays loose.
+ self.assertEqual(
+ self.repo.refs.get_packed_refs(),
+ {
+ b"refs/heads/master": c2.id,
+ b"refs/tags/foo": c1.id,
+ },
+ )
+
+ def test_not_all(self):
+ c1, c2, c3 = build_commit_graph(
+ self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
+ )
+ self.repo.refs[b"HEAD"] = c3.id
+ self.repo.refs[b"refs/heads/master"] = c2.id
+ self.repo.refs[b"refs/tags/foo"] = c1.id
+
+ porcelain.pack_refs(self.repo)
+
+ # Default behaviour packs only tags.
+ self.assertEqual(
+ self.repo.refs.get_packed_refs(),
+ {
+ b"refs/tags/foo": c1.id,
+ },
+ )
+
+
+class ServerTests(PorcelainTestCase):
+ # End-to-end pull/push over HTTP against a bare repo served through
+ # dulwich's WSGI chain on an ephemeral localhost port.
+ @contextlib.contextmanager
+ def _serving(self):
+ # Port 0 lets the OS pick a free port; the daemon thread serves
+ # until shutdown() in the finally block (join bounded to 10s).
+ with make_server("localhost", 0, self.app) as server:
+ thread = threading.Thread(target=server.serve_forever, daemon=True)
+ thread.start()
+
+ try:
+ yield f"http://localhost:{server.server_port}"
+
+ finally:
+ server.shutdown()
+ thread.join(10)
+
+ def setUp(self):
+ super().setUp()
+
+ self.served_repo_path = os.path.join(self.test_dir, "served_repo.git")
+ self.served_repo = Repo.init_bare(self.served_repo_path, mkdir=True)
+ self.addCleanup(self.served_repo.close)
+
+ # Mount the bare repo at "/" of the WSGI app.
+ backend = DictBackend({"/": self.served_repo})
+ self.app = make_wsgi_chain(backend)
+
+ def test_pull(self):
+ (c1,) = build_commit_graph(self.served_repo.object_store, [[1]])
+ self.served_repo.refs[b"refs/heads/master"] = c1.id
+
+ with self._serving() as url:
+ porcelain.pull(self.repo, url, "master")
+
+ def test_push(self):
+ (c1,) = build_commit_graph(self.repo.object_store, [[1]])
+ self.repo.refs[b"refs/heads/master"] = c1.id
+
+ with self._serving() as url:
+ porcelain.push(self.repo, url, "master")
+
+
+class ForEachTests(PorcelainTestCase):
+ # for_each_ref() lists (sha, object_type, ref_name) tuples sorted by
+ # ref name; annotated tags report type b"tag", lightweight b"commit".
+ def setUp(self):
+ super().setUp()
+ c1, c2, c3, c4 = build_commit_graph(
+ self.repo.object_store, [[1], [2, 1], [3, 1, 2], [4]]
+ )
+ porcelain.tag_create(
+ self.repo.path,
+ b"v0.1",
+ objectish=c1.id,
+ annotated=True,
+ message=b"0.1",
+ )
+ porcelain.tag_create(
+ self.repo.path,
+ b"v1.0",
+ objectish=c2.id,
+ annotated=True,
+ message=b"1.0",
+ )
+ # Lightweight tag: points directly at the commit.
+ porcelain.tag_create(self.repo.path, b"simple-tag", objectish=c3.id)
+ porcelain.tag_create(
+ self.repo.path,
+ b"v1.1",
+ objectish=c4.id,
+ annotated=True,
+ message=b"1.1",
+ )
+ porcelain.branch_create(
+ self.repo.path, b"feat", objectish=c2.id.decode("ascii")
+ )
+ self.repo.refs[b"HEAD"] = c4.id
+
+ def test_for_each_ref(self):
+ refs = porcelain.for_each_ref(self.repo)
+
+ self.assertEqual(
+ [(object_type, tag) for _, object_type, tag in refs],
+ [
+ (b"commit", b"refs/heads/feat"),
+ (b"commit", b"refs/heads/master"),
+ (b"commit", b"refs/tags/simple-tag"),
+ (b"tag", b"refs/tags/v0.1"),
+ (b"tag", b"refs/tags/v1.0"),
+ (b"tag", b"refs/tags/v1.1"),
+ ],
+ )
+
+ def test_for_each_ref_pattern(self):
+ # Patterns support fnmatch-style "*" and "?" wildcards.
+ versions = porcelain.for_each_ref(self.repo, pattern="refs/tags/v*")
+ self.assertEqual(
+ [(object_type, tag) for _, object_type, tag in versions],
+ [
+ (b"tag", b"refs/tags/v0.1"),
+ (b"tag", b"refs/tags/v1.0"),
+ (b"tag", b"refs/tags/v1.1"),
+ ],
+ )
+
+ versions = porcelain.for_each_ref(self.repo, pattern="refs/tags/v1.?")
+ self.assertEqual(
+ [(object_type, tag) for _, object_type, tag in versions],
+ [
+ (b"tag", b"refs/tags/v1.0"),
+ (b"tag", b"refs/tags/v1.1"),
+ ],
+ )
blob - /dev/null
blob + 75c5411760b0e41b717b558c7da4b9eec07eb59f (mode 644)
--- /dev/null
+++ tests/test_protocol.py
+# test_protocol.py -- Tests for the git protocol
+# Copyright (C) 2009 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for the smart protocol utility functions."""
+
+from io import BytesIO
+
+from dulwich.errors import HangupException
+from dulwich.protocol import (
+ MULTI_ACK,
+ MULTI_ACK_DETAILED,
+ SINGLE_ACK,
+ BufferedPktLineWriter,
+ GitProtocolError,
+ PktLineParser,
+ Protocol,
+ ReceivableProtocol,
+ ack_type,
+ extract_capabilities,
+ extract_want_line_capabilities,
+)
+
+from . import TestCase
+
+
+class BaseProtocolTests:
+ # Shared test mixin; concrete subclasses provide self.proto (the
+ # protocol under test), self.rin (input stream) and self.rout
+ # (output stream) in their setUp.
+ def test_write_pkt_line_none(self):
+ # None serialises to the flush-pkt "0000".
+ self.proto.write_pkt_line(None)
+ self.assertEqual(self.rout.getvalue(), b"0000")
+
+ def test_write_pkt_line(self):
+ # Length prefix covers the 4 hex digits plus the payload.
+ self.proto.write_pkt_line(b"bla")
+ self.assertEqual(self.rout.getvalue(), b"0007bla")
+
+ def test_read_pkt_line(self):
+ self.rin.write(b"0008cmd ")
+ self.rin.seek(0)
+ self.assertEqual(b"cmd ", self.proto.read_pkt_line())
+
+ def test_eof(self):
+ # A flush-pkt reads as None and flips eof(); reading past it is a
+ # hangup.
+ self.rin.write(b"0000")
+ self.rin.seek(0)
+ self.assertFalse(self.proto.eof())
+ self.assertEqual(None, self.proto.read_pkt_line())
+ self.assertTrue(self.proto.eof())
+ self.assertRaises(HangupException, self.proto.read_pkt_line)
+
+ def test_unread_pkt_line(self):
+ # Only one pkt-line may be pushed back at a time.
+ self.rin.write(b"0007foo0000")
+ self.rin.seek(0)
+ self.assertEqual(b"foo", self.proto.read_pkt_line())
+ self.proto.unread_pkt_line(b"bar")
+ self.assertEqual(b"bar", self.proto.read_pkt_line())
+ self.assertEqual(None, self.proto.read_pkt_line())
+ self.proto.unread_pkt_line(b"baz1")
+ self.assertRaises(ValueError, self.proto.unread_pkt_line, b"baz2")
+
+ def test_read_pkt_seq(self):
+ # read_pkt_seq() yields pkt-lines until the flush-pkt.
+ self.rin.write(b"0008cmd 0005l0000")
+ self.rin.seek(0)
+ self.assertEqual([b"cmd ", b"l"], list(self.proto.read_pkt_seq()))
+
+ def test_read_pkt_line_none(self):
+ self.rin.write(b"0000")
+ self.rin.seek(0)
+ self.assertEqual(None, self.proto.read_pkt_line())
+
+ def test_read_pkt_line_wrong_size(self):
+ # Declared length (0x100) exceeds the available payload.
+ self.rin.write(b"0100too short")
+ self.rin.seek(0)
+ self.assertRaises(GitProtocolError, self.proto.read_pkt_line)
+
+ def test_write_sideband(self):
+ # Sideband channel number is a single byte after the length prefix.
+ self.proto.write_sideband(3, b"bloe")
+ self.assertEqual(self.rout.getvalue(), b"0009\x03bloe")
+
+ def test_send_cmd(self):
+ # Command and args are NUL-separated inside one pkt-line.
+ self.proto.send_cmd(b"fetch", b"a", b"b")
+ self.assertEqual(self.rout.getvalue(), b"000efetch a\x00b\x00")
+
+ def test_read_cmd(self):
+ self.rin.write(b"0012cmd arg1\x00arg2\x00")
+ self.rin.seek(0)
+ self.assertEqual((b"cmd", [b"arg1", b"arg2"]), self.proto.read_cmd())
+
+ def test_read_cmd_noend0(self):
+ # A command line lacking the trailing NUL is rejected.
+ self.rin.write(b"0011cmd arg1\x00arg2")
+ self.rin.seek(0)
+ self.assertRaises(AssertionError, self.proto.read_cmd)
+
+
+class ProtocolTests(BaseProtocolTests, TestCase):
+ # Runs the shared suite against the plain Protocol class, wired to
+ # in-memory BytesIO streams.
+ def setUp(self):
+ TestCase.setUp(self)
+ self.rout = BytesIO()
+ self.rin = BytesIO()
+ self.proto = Protocol(self.rin.read, self.rout.write)
+
+
+class ReceivableBytesIO(BytesIO):
+ """BytesIO with socket-like recv semantics for testing."""
+
+ def __init__(self) -> None:
+ BytesIO.__init__(self)
+ # When False (default), recv() at EOF raises instead of blocking.
+ self.allow_read_past_eof = False
+
+ def recv(self, size):
+ # fail fast if no bytes are available; in a real socket, this would
+ # block forever
+ if self.tell() == len(self.getvalue()) and not self.allow_read_past_eof:
+ raise GitProtocolError("Blocking read past end of socket")
+ if size == 1:
+ return self.read(1)
+ # calls shouldn't return quite as much as asked for
+ return self.read(size - 1)
+
+
class ReceivableProtocolTests(BaseProtocolTests, TestCase):
    """Run the shared protocol tests against ReceivableProtocol.

    The input side is a ReceivableBytesIO so reads exercise the
    recv()-based buffering layer.
    """

    def setUp(self):
        TestCase.setUp(self)
        self.rin = ReceivableBytesIO()
        self.rout = BytesIO()
        self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
        # A tiny buffer makes the tests cross buffer boundaries quickly.
        self.proto._rbufsize = 8

    def test_eof(self):
        # Allow blocking reads past EOF just for this test. The only parts
        # of the protocol that might check for EOF do not depend on the
        # recv() semantics anyway.
        self.rin.allow_read_past_eof = True
        BaseProtocolTests.test_eof(self)

    def test_recv(self):
        all_data = b"1234567" * 10  # deliberately not a multiple of bufsize
        self.rin.write(all_data)
        self.rin.seek(0)
        received = b""
        # Each round asks for 8 bytes and actually reads 7, so draining the
        # 70 bytes takes exactly 10 iterations.
        for _ in range(10):
            received += self.proto.recv(10)
        # Any further recv would block.
        self.assertRaises(GitProtocolError, self.proto.recv, 10)
        self.assertEqual(all_data, received)

    def test_recv_read(self):
        all_data = b"1234567"  # recv exactly in one call
        self.rin.write(all_data)
        self.rin.seek(0)
        self.assertEqual(b"1234", self.proto.recv(4))
        self.assertEqual(b"567", self.proto.read(3))
        with self.assertRaises(GitProtocolError):
            self.proto.recv(10)

    def test_read_recv(self):
        all_data = b"12345678abcdefg"
        self.rin.write(all_data)
        self.rin.seek(0)
        self.assertEqual(b"1234", self.proto.read(4))
        self.assertEqual(b"5678abc", self.proto.recv(8))
        self.assertEqual(b"defg", self.proto.read(4))
        with self.assertRaises(GitProtocolError):
            self.proto.recv(10)

    def test_mixed(self):
        # Interleave read() and recv() over an arbitrary non-repeating byte
        # string and check that nothing is lost or duplicated.
        all_data = b",".join(str(i).encode("ascii") for i in range(100))
        self.rin.write(all_data)
        self.rin.seek(0)
        data = b""

        finished = False
        for chunk_size in range(1, 100):
            data += self.proto.recv(chunk_size)
            # Near the end, switch to non-blocking reads instead of
            # blocking ones.
            if len(data) + chunk_size > len(all_data):
                data += self.proto.recv(chunk_size)
                # ReceivableBytesIO leaves off the last byte unless we ask
                # nicely.
                data += self.proto.recv(1)
                finished = True
                break
            data += self.proto.read(chunk_size)
        if not finished:
            # The loop must always terminate via the break above.
            self.fail()

        self.assertEqual(all_data, data)
+
+
class CapabilitiesTestCase(TestCase):
    """Tests for capability extraction and ack-type selection helpers."""

    def test_plain(self):
        self.assertEqual((b"bla", []), extract_capabilities(b"bla"))

    def test_caps(self):
        # A trailing newline is tolerated.
        for raw in (b"bla\0la", b"bla\0la\n"):
            self.assertEqual((b"bla", [b"la"]), extract_capabilities(raw))
        self.assertEqual(
            (b"bla", [b"la", b"la"]), extract_capabilities(b"bla\0la la")
        )

    def test_plain_want_line(self):
        self.assertEqual(
            (b"want bla", []), extract_want_line_capabilities(b"want bla")
        )

    def test_caps_want_line(self):
        for raw in (b"want bla la", b"want bla la\n"):
            self.assertEqual(
                (b"want bla", [b"la"]), extract_want_line_capabilities(raw)
            )
        self.assertEqual(
            (b"want bla", [b"la", b"la"]),
            extract_want_line_capabilities(b"want bla la la"),
        )

    def test_ack_type(self):
        self.assertEqual(SINGLE_ACK, ack_type([b"foo", b"bar"]))
        self.assertEqual(MULTI_ACK, ack_type([b"foo", b"bar", b"multi_ack"]))
        self.assertEqual(
            MULTI_ACK_DETAILED,
            ack_type([b"foo", b"bar", b"multi_ack_detailed"]),
        )
        # choose detailed when both present
        self.assertEqual(
            MULTI_ACK_DETAILED,
            ack_type([b"foo", b"bar", b"multi_ack", b"multi_ack_detailed"]),
        )
+
+
class BufferedPktLineWriterTests(TestCase):
    """Tests for BufferedPktLineWriter buffering and flushing behavior."""

    def setUp(self):
        TestCase.setUp(self)
        self._sink = BytesIO()
        self._writer = BufferedPktLineWriter(self._sink.write, bufsize=16)

    def assertOutputEquals(self, expected):
        """Assert that exactly ``expected`` has reached the underlying sink."""
        self.assertEqual(expected, self._sink.getvalue())

    def _truncate(self):
        # Discard everything written so far so later asserts start clean.
        self._sink.seek(0)
        self._sink.truncate()

    def test_write(self):
        # Nothing reaches the sink until flush().
        self._writer.write(b"foo")
        self.assertOutputEquals(b"")
        self._writer.flush()
        self.assertOutputEquals(b"0007foo")

    def test_write_none(self):
        # None is written as a flush-pkt.
        self._writer.write(None)
        self.assertOutputEquals(b"")
        self._writer.flush()
        self.assertOutputEquals(b"0000")

    def test_flush_empty(self):
        self._writer.flush()
        self.assertOutputEquals(b"")

    def test_write_multiple(self):
        self._writer.write(b"foo")
        self._writer.write(b"bar")
        self.assertOutputEquals(b"")
        self._writer.flush()
        self.assertOutputEquals(b"0007foo0007bar")

    def test_write_across_boundary(self):
        # Filling the 16-byte buffer forces a partial write; the remainder
        # stays buffered until flush().
        self._writer.write(b"foo")
        self._writer.write(b"barbaz")
        self.assertOutputEquals(b"0007foo000abarba")
        self._truncate()
        self._writer.flush()
        self.assertOutputEquals(b"z")

    def test_write_to_boundary(self):
        # Exactly filling the buffer drains it completely.
        self._writer.write(b"foo")
        self._writer.write(b"barba")
        self.assertOutputEquals(b"0007foo0009barba")
        self._truncate()
        self._writer.write(b"z")
        self._writer.flush()
        self.assertOutputEquals(b"0005z")
+
+
class PktLineParserTests(TestCase):
    """Tests for incremental pkt-line parsing via PktLineParser."""

    def test_none(self):
        seen = []
        parser = PktLineParser(seen.append)
        parser.parse(b"0000")
        self.assertEqual([None], seen)
        self.assertEqual(b"", parser.get_tail())

    def test_small_fragments(self):
        # A pkt-line split across multiple parse() calls is reassembled.
        seen = []
        parser = PktLineParser(seen.append)
        for fragment in (b"00", b"05", b"z0000"):
            parser.parse(fragment)
        self.assertEqual([b"z", None], seen)
        self.assertEqual(b"", parser.get_tail())

    def test_multiple_packets(self):
        # Leftover bytes after the last complete pkt-line become the tail.
        seen = []
        parser = PktLineParser(seen.append)
        parser.parse(b"0005z0006aba")
        self.assertEqual([b"z", b"ab"], seen)
        self.assertEqual(b"a", parser.get_tail())
blob - /dev/null
blob + a8b4cf92e640eb362f55eb4aaaf5d4fd877cbc2e (mode 644)
--- /dev/null
+++ tests/test_reflog.py
+# test_reflog.py -- tests for reflog.py
+# Copyright (C) 2015 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for dulwich.reflog."""
+
+from io import BytesIO
+
+from dulwich.objects import ZERO_SHA
+from dulwich.reflog import (
+ drop_reflog_entry,
+ format_reflog_line,
+ parse_reflog_line,
+ read_reflog,
+)
+
+from . import TestCase
+
+
class ReflogLineTests(TestCase):
    """Tests for formatting and parsing individual reflog lines."""

    # One canonical reflog line shared by the format and parse tests.
    _LINE = (
        b"0000000000000000000000000000000000000000 "
        b"49030649db3dfec5a9bc03e5dde4255a14499f16 Jelmer Vernooij "
        b"<jelmer@jelmer.uk> 1446552482 +0000 "
        b"clone: from git://jelmer.uk/samba"
    )

    def test_format(self):
        # An explicit all-zero old SHA and a None old SHA format identically.
        for old_sha in (b"0000000000000000000000000000000000000000", None):
            self.assertEqual(
                self._LINE,
                format_reflog_line(
                    old_sha,
                    b"49030649db3dfec5a9bc03e5dde4255a14499f16",
                    b"Jelmer Vernooij <jelmer@jelmer.uk>",
                    1446552482,
                    0,
                    b"clone: from git://jelmer.uk/samba",
                ),
            )

    def test_parse(self):
        self.assertEqual(
            (
                b"0000000000000000000000000000000000000000",
                b"49030649db3dfec5a9bc03e5dde4255a14499f16",
                b"Jelmer Vernooij <jelmer@jelmer.uk>",
                1446552482,
                0,
                b"clone: from git://jelmer.uk/samba",
            ),
            parse_reflog_line(self._LINE),
        )
+
+
# Reflog fixture with three chained entries: each entry's new SHA is the
# next entry's old SHA, with timestamps 1446552482..1446552484.
_TEST_REFLOG = (
    b"0000000000000000000000000000000000000000 "
    b"49030649db3dfec5a9bc03e5dde4255a14499f16 Jelmer Vernooij "
    b"<jelmer@jelmer.uk> 1446552482 +0000 "
    b"clone: from git://jelmer.uk/samba\n"
    b"49030649db3dfec5a9bc03e5dde4255a14499f16 "
    b"42d06bd4b77fed026b154d16493e5deab78f02ec Jelmer Vernooij "
    b"<jelmer@jelmer.uk> 1446552483 +0000 "
    b"clone: from git://jelmer.uk/samba\n"
    b"42d06bd4b77fed026b154d16493e5deab78f02ec "
    b"df6800012397fb85c56e7418dd4eb9405dee075c Jelmer Vernooij "
    b"<jelmer@jelmer.uk> 1446552484 +0000 "
    b"clone: from git://jelmer.uk/samba\n"
)
+
+
class ReflogDropTests(TestCase):
    """Tests for drop_reflog_entry against the three-entry fixture."""

    def setUp(self):
        TestCase.setUp(self)
        self.f = BytesIO(_TEST_REFLOG)
        self.original_log = list(read_reflog(self.f))
        self.f.seek(0)

    def _read_log(self):
        """Re-read the (possibly rewritten) reflog from the start."""
        self.f.seek(0)
        return list(read_reflog(self.f))

    def test_invalid(self):
        with self.assertRaises(ValueError):
            drop_reflog_entry(self.f, -1)

    def test_drop_entry(self):
        # Index 0 is the newest entry: dropping it keeps the two oldest.
        drop_reflog_entry(self.f, 0)
        log = self._read_log()
        self.assertEqual(2, len(log))
        self.assertEqual(self.original_log[0:2], log)

        self.f.seek(0)
        drop_reflog_entry(self.f, 1)
        log = self._read_log()
        self.assertEqual(1, len(log))
        self.assertEqual(self.original_log[1], log[0])

    def test_drop_entry_with_rewrite(self):
        # With rewrite=True the neighbours are re-chained: the entry after
        # the dropped one takes its predecessor's new SHA as old SHA.
        drop_reflog_entry(self.f, 1, True)
        log = self._read_log()
        self.assertEqual(2, len(log))
        self.assertEqual(self.original_log[0], log[0])
        self.assertEqual(self.original_log[0].new_sha, log[1].old_sha)
        self.assertEqual(self.original_log[2].new_sha, log[1].new_sha)

        self.f.seek(0)
        drop_reflog_entry(self.f, 1, True)
        log = self._read_log()
        self.assertEqual(1, len(log))
        self.assertEqual(ZERO_SHA, log[0].old_sha)
        self.assertEqual(self.original_log[2].new_sha, log[0].new_sha)
blob - /dev/null
blob + 31e3e37254f6b8267c0129b2abd21c4b81ddec7e (mode 644)
--- /dev/null
+++ tests/test_refs.py
+# test_refs.py -- tests for refs.py
+# Copyright (C) 2013 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for dulwich.refs."""
+
+import os
+import sys
+import tempfile
+from io import BytesIO
+from typing import ClassVar, Dict
+
+from dulwich import errors
+from dulwich.file import GitFile
+from dulwich.objects import ZERO_SHA
+from dulwich.refs import (
+ DictRefsContainer,
+ InfoRefsContainer,
+ SymrefLoop,
+ _split_ref_line,
+ check_ref_format,
+ parse_symref_value,
+ read_packed_refs,
+ read_packed_refs_with_peeled,
+ strip_peeled_refs,
+ write_packed_refs,
+)
+from dulwich.repo import Repo
+
+from . import SkipTest, TestCase
+from .utils import open_repo, tear_down_repo
+
+
class CheckRefFormatTests(TestCase):
    """Tests for the check_ref_format function.

    These are the same tests as in the git test suite.
    """

    def test_valid(self):
        for refname in (
            b"heads/foo",
            b"foo/bar/baz",
            b"refs///heads/foo",
            b"foo./bar",
            b"heads/foo@bar",
            b"heads/fix.lock.error",
        ):
            self.assertTrue(check_ref_format(refname))

    def test_invalid(self):
        for refname in (
            b"foo",
            b"heads/foo/",
            b"./foo",
            b".refs/foo",
            b"heads/foo..bar",
            b"heads/foo?bar",
            b"heads/foo.lock",
            b"heads/v@{ation",
            b"heads/foo\bar",
        ):
            self.assertFalse(check_ref_format(refname))
+
+
# Placeholder 40-character (SHA-1 sized) hex strings for the fixtures below.
ONES = b"1" * 40
TWOS = b"2" * 40
THREES = b"3" * 40
FOURS = b"4" * 40
+
+
class PackedRefsFileTests(TestCase):
    """Tests for reading and writing the packed-refs file format."""

    def test_split_ref_line_errors(self):
        # A packed-refs line must be "<40-hex sha> <well-formed refname>".
        self.assertRaises(errors.PackedRefsException, _split_ref_line, b"singlefield")
        self.assertRaises(errors.PackedRefsException, _split_ref_line, b"badsha name")
        self.assertRaises(
            errors.PackedRefsException,
            _split_ref_line,
            ONES + b" bad/../refname",
        )

    def test_read_without_peeled(self):
        # Comment lines are skipped; each data line yields (sha, name).
        f = BytesIO(b"\n".join([b"# comment", ONES + b" ref/1", TWOS + b" ref/2"]))
        self.assertEqual(
            [(ONES, b"ref/1"), (TWOS, b"ref/2")], list(read_packed_refs(f))
        )

    def test_read_without_peeled_errors(self):
        # The plain reader rejects peeled ("^<sha>") lines.
        f = BytesIO(b"\n".join([ONES + b" ref/1", b"^" + TWOS]))
        self.assertRaises(errors.PackedRefsException, list, read_packed_refs(f))

    def test_read_with_peeled(self):
        # A "^<sha>" line attaches a peeled sha to the preceding ref;
        # refs without one get None.
        f = BytesIO(
            b"\n".join(
                [
                    ONES + b" ref/1",
                    TWOS + b" ref/2",
                    b"^" + THREES,
                    FOURS + b" ref/4",
                ]
            )
        )
        self.assertEqual(
            [
                (ONES, b"ref/1", None),
                (TWOS, b"ref/2", THREES),
                (FOURS, b"ref/4", None),
            ],
            list(read_packed_refs_with_peeled(f)),
        )

    def test_read_with_peeled_errors(self):
        # A peeled line with no preceding ref is invalid...
        f = BytesIO(b"\n".join([b"^" + TWOS, ONES + b" ref/1"]))
        self.assertRaises(errors.PackedRefsException, list, read_packed_refs(f))

        # ...as are two consecutive peeled lines.
        f = BytesIO(b"\n".join([ONES + b" ref/1", b"^" + TWOS, b"^" + THREES]))
        self.assertRaises(errors.PackedRefsException, list, read_packed_refs(f))

    def test_write_with_peeled(self):
        # Peeled shas are emitted on "^" lines, announced by a header.
        f = BytesIO()
        write_packed_refs(f, {b"ref/1": ONES, b"ref/2": TWOS}, {b"ref/1": THREES})
        self.assertEqual(
            b"\n".join(
                [
                    b"# pack-refs with: peeled",
                    ONES + b" ref/1",
                    b"^" + THREES,
                    TWOS + b" ref/2",
                ]
            )
            + b"\n",
            f.getvalue(),
        )

    def test_write_without_peeled(self):
        # Without peeled data, no header is written.
        f = BytesIO()
        write_packed_refs(f, {b"ref/1": ONES, b"ref/2": TWOS})
        self.assertEqual(
            b"\n".join([ONES + b" ref/1", TWOS + b" ref/2"]) + b"\n",
            f.getvalue(),
        )
+
+
# Dict of refs that we expect all RefsContainerTests subclasses to define.
# refs/heads/loop is a symref pointing at itself, used to exercise symref
# loop detection.
_TEST_REFS = {
    b"HEAD": b"42d06bd4b77fed026b154d16493e5deab78f02ec",
    b"refs/heads/40-char-ref-aaaaaaaaaaaaaaaaaa": b"42d06bd4b77fed026b154d16493e5deab78f02ec",
    b"refs/heads/master": b"42d06bd4b77fed026b154d16493e5deab78f02ec",
    b"refs/heads/packed": b"42d06bd4b77fed026b154d16493e5deab78f02ec",
    b"refs/tags/refs-0.1": b"df6800012397fb85c56e7418dd4eb9405dee075c",
    b"refs/tags/refs-0.2": b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8",
    b"refs/heads/loop": b"ref: refs/heads/loop",
}
+
+
class RefsContainerTests:
    """Shared tests for RefsContainer implementations.

    Mixed into a TestCase subclass whose setUp assigns ``self._refs`` a
    container pre-populated with the refs in _TEST_REFS.
    """

    def test_keys(self):
        actual_keys = set(self._refs.keys())
        self.assertEqual(set(self._refs.allkeys()), actual_keys)
        self.assertEqual(set(_TEST_REFS.keys()), actual_keys)

        # keys() with a base path returns names relative to that path.
        actual_keys = self._refs.keys(b"refs/heads")
        actual_keys.discard(b"loop")
        self.assertEqual(
            [b"40-char-ref-aaaaaaaaaaaaaaaaaa", b"master", b"packed"],
            sorted(actual_keys),
        )
        self.assertEqual(
            [b"refs-0.1", b"refs-0.2"], sorted(self._refs.keys(b"refs/tags"))
        )

    def test_iter(self):
        # Iterating the container matches iterating keys().
        actual_keys = set(self._refs.keys())
        self.assertEqual(set(self._refs), actual_keys)
        self.assertEqual(set(_TEST_REFS.keys()), actual_keys)

    def test_as_dict(self):
        # refs/heads/loop does not show up even if it exists
        expected_refs = dict(_TEST_REFS)
        del expected_refs[b"refs/heads/loop"]
        self.assertEqual(expected_refs, self._refs.as_dict())

    def test_get_symrefs(self):
        self._refs.set_symbolic_ref(b"refs/heads/src", b"refs/heads/dst")
        symrefs = self._refs.get_symrefs()
        # HEAD may or may not appear as a symref depending on the
        # container; ignore it either way.
        if b"HEAD" in symrefs:
            symrefs.pop(b"HEAD")
        self.assertEqual(
            {
                b"refs/heads/src": b"refs/heads/dst",
                b"refs/heads/loop": b"refs/heads/loop",
            },
            symrefs,
        )

    def test_setitem(self):
        self._refs[b"refs/some/ref"] = b"42d06bd4b77fed026b154d16493e5deab78f02ec"
        self.assertEqual(
            b"42d06bd4b77fed026b154d16493e5deab78f02ec",
            self._refs[b"refs/some/ref"],
        )
        # Names outside refs/ (other than HEAD) are rejected.
        self.assertRaises(
            errors.RefFormatError,
            self._refs.__setitem__,
            b"notrefs/foo",
            b"42d06bd4b77fed026b154d16493e5deab78f02ec",
        )

    def test_set_if_equals(self):
        nines = b"9" * 40
        # Mismatched old value: no update happens.
        self.assertFalse(self._refs.set_if_equals(b"HEAD", b"c0ffee", nines))
        self.assertEqual(
            b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs[b"HEAD"]
        )

        self.assertTrue(
            self._refs.set_if_equals(
                b"HEAD", b"42d06bd4b77fed026b154d16493e5deab78f02ec", nines
            )
        )
        self.assertEqual(nines, self._refs[b"HEAD"])

        # Setting the ref again is a no-op, but will return True.
        self.assertTrue(self._refs.set_if_equals(b"HEAD", nines, nines))
        self.assertEqual(nines, self._refs[b"HEAD"])

        # None as the old value skips the comparison entirely.
        self.assertTrue(self._refs.set_if_equals(b"refs/heads/master", None, nines))
        self.assertEqual(nines, self._refs[b"refs/heads/master"])

        # ZERO_SHA as the old value matches a missing ref.
        self.assertTrue(
            self._refs.set_if_equals(b"refs/heads/nonexistent", ZERO_SHA, nines)
        )
        self.assertEqual(nines, self._refs[b"refs/heads/nonexistent"])

    def test_add_if_new(self):
        nines = b"9" * 40
        # Existing refs are not overwritten.
        self.assertFalse(self._refs.add_if_new(b"refs/heads/master", nines))
        self.assertEqual(
            b"42d06bd4b77fed026b154d16493e5deab78f02ec",
            self._refs[b"refs/heads/master"],
        )

        self.assertTrue(self._refs.add_if_new(b"refs/some/ref", nines))
        self.assertEqual(nines, self._refs[b"refs/some/ref"])

    def test_set_symbolic_ref(self):
        self._refs.set_symbolic_ref(b"refs/heads/symbolic", b"refs/heads/master")
        self.assertEqual(
            b"ref: refs/heads/master",
            self._refs.read_loose_ref(b"refs/heads/symbolic"),
        )
        # Item lookup follows the symref to the target's value.
        self.assertEqual(
            b"42d06bd4b77fed026b154d16493e5deab78f02ec",
            self._refs[b"refs/heads/symbolic"],
        )

    def test_set_symbolic_ref_overwrite(self):
        nines = b"9" * 40
        self.assertNotIn(b"refs/heads/symbolic", self._refs)
        self._refs[b"refs/heads/symbolic"] = nines
        self.assertEqual(nines, self._refs.read_loose_ref(b"refs/heads/symbolic"))
        # A symbolic ref may replace an existing regular ref.
        self._refs.set_symbolic_ref(b"refs/heads/symbolic", b"refs/heads/master")
        self.assertEqual(
            b"ref: refs/heads/master",
            self._refs.read_loose_ref(b"refs/heads/symbolic"),
        )
        self.assertEqual(
            b"42d06bd4b77fed026b154d16493e5deab78f02ec",
            self._refs[b"refs/heads/symbolic"],
        )

    def test_check_refname(self):
        self._refs._check_refname(b"HEAD")
        self._refs._check_refname(b"refs/stash")
        self._refs._check_refname(b"refs/heads/foo")

        self.assertRaises(errors.RefFormatError, self._refs._check_refname, b"refs")
        self.assertRaises(
            errors.RefFormatError, self._refs._check_refname, b"notrefs/foo"
        )

    def test_contains(self):
        self.assertIn(b"refs/heads/master", self._refs)
        self.assertNotIn(b"refs/heads/bar", self._refs)

    def test_delitem(self):
        self.assertEqual(
            b"42d06bd4b77fed026b154d16493e5deab78f02ec",
            self._refs[b"refs/heads/master"],
        )
        del self._refs[b"refs/heads/master"]
        self.assertRaises(KeyError, lambda: self._refs[b"refs/heads/master"])

    def test_remove_if_equals(self):
        # Mismatched old value: the ref is kept.
        self.assertFalse(self._refs.remove_if_equals(b"HEAD", b"c0ffee"))
        self.assertEqual(
            b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs[b"HEAD"]
        )
        self.assertTrue(
            self._refs.remove_if_equals(
                b"refs/tags/refs-0.2",
                b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8",
            )
        )
        # Removing an already-missing ref with ZERO_SHA succeeds.
        self.assertTrue(self._refs.remove_if_equals(b"refs/tags/refs-0.2", ZERO_SHA))
        self.assertNotIn(b"refs/tags/refs-0.2", self._refs)

    def test_import_refs_name(self):
        self._refs[b"refs/remotes/origin/other"] = (
            b"48d01bd4b77fed026b154d16493e5deab78f02ec"
        )
        self._refs.import_refs(
            b"refs/remotes/origin",
            {b"master": b"42d06bd4b77fed026b154d16493e5deab78f02ec"},
        )
        self.assertEqual(
            b"42d06bd4b77fed026b154d16493e5deab78f02ec",
            self._refs[b"refs/remotes/origin/master"],
        )
        # Without prune, refs absent from the import survive.
        self.assertEqual(
            b"48d01bd4b77fed026b154d16493e5deab78f02ec",
            self._refs[b"refs/remotes/origin/other"],
        )

    def test_import_refs_name_prune(self):
        self._refs[b"refs/remotes/origin/other"] = (
            b"48d01bd4b77fed026b154d16493e5deab78f02ec"
        )
        self._refs.import_refs(
            b"refs/remotes/origin",
            {b"master": b"42d06bd4b77fed026b154d16493e5deab78f02ec"},
            prune=True,
        )
        self.assertEqual(
            b"42d06bd4b77fed026b154d16493e5deab78f02ec",
            self._refs[b"refs/remotes/origin/master"],
        )
        # With prune=True, refs absent from the import are removed.
        self.assertNotIn(b"refs/remotes/origin/other", self._refs)
+
+
class DictRefsContainerTests(RefsContainerTests, TestCase):
    """Run the shared refs-container tests against DictRefsContainer."""

    def setUp(self):
        TestCase.setUp(self)
        self._refs = DictRefsContainer(dict(_TEST_REFS))

    def test_invalid_refname(self):
        # FIXME: Move this test into RefsContainerTests, but requires
        # some way of injecting invalid refs.
        self._refs._refs[b"refs/stash"] = b"00" * 20
        expected = dict(_TEST_REFS)
        del expected[b"refs/heads/loop"]
        expected[b"refs/stash"] = b"00" * 20
        self.assertEqual(expected, self._refs.as_dict())
+
+
+class DiskRefsContainerTests(RefsContainerTests, TestCase):
+ def setUp(self):
+ TestCase.setUp(self)
+ self._repo = open_repo("refs.git")
+ self.addCleanup(tear_down_repo, self._repo)
+ self._refs = self._repo.refs
+
+ def test_get_packed_refs(self):
+ self.assertEqual(
+ {
+ b"refs/heads/packed": b"42d06bd4b77fed026b154d16493e5deab78f02ec",
+ b"refs/tags/refs-0.1": b"df6800012397fb85c56e7418dd4eb9405dee075c",
+ },
+ self._refs.get_packed_refs(),
+ )
+
+ def test_get_peeled_not_packed(self):
+ # not packed
+ self.assertEqual(None, self._refs.get_peeled(b"refs/tags/refs-0.2"))
+ self.assertEqual(
+ b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8",
+ self._refs[b"refs/tags/refs-0.2"],
+ )
+
+ # packed, known not peelable
+ self.assertEqual(
+ self._refs[b"refs/heads/packed"],
+ self._refs.get_peeled(b"refs/heads/packed"),
+ )
+
+ # packed, peeled
+ self.assertEqual(
+ b"42d06bd4b77fed026b154d16493e5deab78f02ec",
+ self._refs.get_peeled(b"refs/tags/refs-0.1"),
+ )
+
+ def test_setitem(self):
+ RefsContainerTests.test_setitem(self)
+ path = os.path.join(self._refs.path, b"refs", b"some", b"ref")
+ with open(path, "rb") as f:
+ self.assertEqual(b"42d06bd4b77fed026b154d16493e5deab78f02ec", f.read()[:40])
+
+ self.assertRaises(
+ OSError,
+ self._refs.__setitem__,
+ b"refs/some/ref/sub",
+ b"42d06bd4b77fed026b154d16493e5deab78f02ec",
+ )
+
+ def test_delete_refs_container(self):
+ # We shouldn't delete the refs directory
+ self._refs[b"refs/heads/blah"] = b"42d06bd4b77fed026b154d16493e5deab78f02ec"
+ for ref in self._refs.allkeys():
+ del self._refs[ref]
+ self.assertTrue(os.path.exists(os.path.join(self._refs.path, b"refs")))
+
+ def test_setitem_packed(self):
+ with open(os.path.join(self._refs.path, b"packed-refs"), "w") as f:
+ f.write("# pack-refs with: peeled fully-peeled sorted \n")
+ f.write("42d06bd4b77fed026b154d16493e5deab78f02ec refs/heads/packed\n")
+
+ # It's allowed to set a new ref on a packed ref, the new ref will be
+ # placed outside on refs/
+ self._refs[b"refs/heads/packed"] = b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8"
+ packed_ref_path = os.path.join(self._refs.path, b"refs", b"heads", b"packed")
+ with open(packed_ref_path, "rb") as f:
+ self.assertEqual(b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8", f.read()[:40])
+
+ self.assertRaises(
+ OSError,
+ self._refs.__setitem__,
+ b"refs/heads/packed/sub",
+ b"42d06bd4b77fed026b154d16493e5deab78f02ec",
+ )
+
+ # this shouldn't overwrite the packed refs
+ self.assertEqual(
+ {b"refs/heads/packed": b"42d06bd4b77fed026b154d16493e5deab78f02ec"},
+ self._refs.get_packed_refs(),
+ )
+
+ def test_add_packed_refs(self):
+ # first, create a non-packed ref
+ self._refs[b"refs/heads/packed"] = b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8"
+
+ packed_ref_path = os.path.join(self._refs.path, b"refs", b"heads", b"packed")
+ self.assertTrue(os.path.exists(packed_ref_path))
+
+ # now overwrite that with a packed ref
+ packed_refs_file_path = os.path.join(self._refs.path, b"packed-refs")
+ self._refs.add_packed_refs(
+ {
+ b"refs/heads/packed": b"42d06bd4b77fed026b154d16493e5deab78f02ec",
+ }
+ )
+
+ # that should kill the file
+ self.assertFalse(os.path.exists(packed_ref_path))
+
+ # now delete the packed ref
+ self._refs.add_packed_refs(
+ {
+ b"refs/heads/packed": None,
+ }
+ )
+
+ # and it's gone!
+ self.assertFalse(os.path.exists(packed_ref_path))
+
+ self.assertRaises(
+ KeyError,
+ self._refs.__getitem__,
+ b"refs/heads/packed",
+ )
+
+ # just in case, make sure we can't pack HEAD
+ self.assertRaises(
+ ValueError,
+ self._refs.add_packed_refs,
+ {b"HEAD": "02ac81614bcdbd585a37b4b0edf8cb8a"},
+ )
+
+ # delete all packed refs
+ self._refs.add_packed_refs({ref: None for ref in self._refs.get_packed_refs()})
+
+ self.assertEqual({}, self._refs.get_packed_refs())
+
+ # remove the packed ref file, and check that adding nothing doesn't affect that
+ os.remove(packed_refs_file_path)
+
+ # adding nothing doesn't make it reappear
+ self._refs.add_packed_refs({})
+
+ self.assertFalse(os.path.exists(packed_refs_file_path))
+
+ def test_setitem_symbolic(self):
+ ones = b"1" * 40
+ self._refs[b"HEAD"] = ones
+ self.assertEqual(ones, self._refs[b"HEAD"])
+
+ # ensure HEAD was not modified
+ f = open(os.path.join(self._refs.path, b"HEAD"), "rb")
+ v = next(iter(f)).rstrip(b"\n\r")
+ f.close()
+ self.assertEqual(b"ref: refs/heads/master", v)
+
+ # ensure the symbolic link was written through
+ f = open(os.path.join(self._refs.path, b"refs", b"heads", b"master"), "rb")
+ self.assertEqual(ones, f.read()[:40])
+ f.close()
+
+ def test_set_if_equals(self):
+ RefsContainerTests.test_set_if_equals(self)
+
+ # ensure symref was followed
+ self.assertEqual(b"9" * 40, self._refs[b"refs/heads/master"])
+
+ # ensure lockfile was deleted
+ self.assertFalse(
+ os.path.exists(
+ os.path.join(self._refs.path, b"refs", b"heads", b"master.lock")
+ )
+ )
+ self.assertFalse(os.path.exists(os.path.join(self._refs.path, b"HEAD.lock")))
+
+ def test_add_if_new_packed(self):
+ # don't overwrite packed ref
+ self.assertFalse(self._refs.add_if_new(b"refs/tags/refs-0.1", b"9" * 40))
+ self.assertEqual(
+ b"df6800012397fb85c56e7418dd4eb9405dee075c",
+ self._refs[b"refs/tags/refs-0.1"],
+ )
+
+ def test_add_if_new_symbolic(self):
+ # Use an empty repo instead of the default.
+ repo_dir = os.path.join(tempfile.mkdtemp(), "test")
+ os.makedirs(repo_dir)
+ repo = Repo.init(repo_dir)
+ self.addCleanup(tear_down_repo, repo)
+ refs = repo.refs
+
+ nines = b"9" * 40
+ self.assertEqual(b"ref: refs/heads/master", refs.read_ref(b"HEAD"))
+ self.assertNotIn(b"refs/heads/master", refs)
+ self.assertTrue(refs.add_if_new(b"HEAD", nines))
+ self.assertEqual(b"ref: refs/heads/master", refs.read_ref(b"HEAD"))
+ self.assertEqual(nines, refs[b"HEAD"])
+ self.assertEqual(nines, refs[b"refs/heads/master"])
+ self.assertFalse(refs.add_if_new(b"HEAD", b"1" * 40))
+ self.assertEqual(nines, refs[b"HEAD"])
+ self.assertEqual(nines, refs[b"refs/heads/master"])
+
+ def test_follow(self):
+ self.assertEqual(
+ (
+ [b"HEAD", b"refs/heads/master"],
+ b"42d06bd4b77fed026b154d16493e5deab78f02ec",
+ ),
+ self._refs.follow(b"HEAD"),
+ )
+ self.assertEqual(
+ (
+ [b"refs/heads/master"],
+ b"42d06bd4b77fed026b154d16493e5deab78f02ec",
+ ),
+ self._refs.follow(b"refs/heads/master"),
+ )
+ self.assertRaises(SymrefLoop, self._refs.follow, b"refs/heads/loop")
+
+ def test_set_overwrite_loop(self):
+ self.assertRaises(SymrefLoop, self._refs.follow, b"refs/heads/loop")
+ self._refs[b"refs/heads/loop"] = b"42d06bd4b77fed026b154d16493e5deab78f02ec"
+ self.assertEqual(
+ ([b"refs/heads/loop"], b"42d06bd4b77fed026b154d16493e5deab78f02ec"),
+ self._refs.follow(b"refs/heads/loop"),
+ )
+
+ def test_delitem(self):
+ RefsContainerTests.test_delitem(self)
+ ref_file = os.path.join(self._refs.path, b"refs", b"heads", b"master")
+ self.assertFalse(os.path.exists(ref_file))
+ self.assertNotIn(b"refs/heads/master", self._refs.get_packed_refs())
+
+ def test_delitem_symbolic(self):
+ self.assertEqual(b"ref: refs/heads/master", self._refs.read_loose_ref(b"HEAD"))
+ del self._refs[b"HEAD"]
+ self.assertRaises(KeyError, lambda: self._refs[b"HEAD"])
+ self.assertEqual(
+ b"42d06bd4b77fed026b154d16493e5deab78f02ec",
+ self._refs[b"refs/heads/master"],
+ )
+ self.assertFalse(os.path.exists(os.path.join(self._refs.path, b"HEAD")))
+
+ def test_remove_if_equals_symref(self):
+ # HEAD is a symref, so shouldn't equal its dereferenced value
+ self.assertFalse(
+ self._refs.remove_if_equals(
+ b"HEAD", b"42d06bd4b77fed026b154d16493e5deab78f02ec"
+ )
+ )
+ self.assertTrue(
+ self._refs.remove_if_equals(
+ b"refs/heads/master",
+ b"42d06bd4b77fed026b154d16493e5deab78f02ec",
+ )
+ )
+ self.assertRaises(KeyError, lambda: self._refs[b"refs/heads/master"])
+
+ # HEAD is now a broken symref
+ self.assertRaises(KeyError, lambda: self._refs[b"HEAD"])
+ self.assertEqual(b"ref: refs/heads/master", self._refs.read_loose_ref(b"HEAD"))
+
+ self.assertFalse(
+ os.path.exists(
+ os.path.join(self._refs.path, b"refs", b"heads", b"master.lock")
+ )
+ )
+ self.assertFalse(os.path.exists(os.path.join(self._refs.path, b"HEAD.lock")))
+
+ def test_remove_packed_without_peeled(self):
+ refs_file = os.path.join(self._repo.path, "packed-refs")
+ f = GitFile(refs_file)
+ refs_data = f.read()
+ f.close()
+ f = GitFile(refs_file, "wb")
+ f.write(
+ b"\n".join(
+ line
+ for line in refs_data.split(b"\n")
+ if not line or line[0] not in b"#^"
+ )
+ )
+ f.close()
+ self._repo = Repo(self._repo.path)
+ refs = self._repo.refs
+ self.assertTrue(
+ refs.remove_if_equals(
+ b"refs/heads/packed",
+ b"42d06bd4b77fed026b154d16493e5deab78f02ec",
+ )
+ )
+
+ def test_remove_if_equals_packed(self):
+ # test removing ref that is only packed
+ self.assertEqual(
+ b"df6800012397fb85c56e7418dd4eb9405dee075c",
+ self._refs[b"refs/tags/refs-0.1"],
+ )
+ self.assertTrue(
+ self._refs.remove_if_equals(
+ b"refs/tags/refs-0.1",
+ b"df6800012397fb85c56e7418dd4eb9405dee075c",
+ )
+ )
+ self.assertRaises(KeyError, lambda: self._refs[b"refs/tags/refs-0.1"])
+
+ def test_remove_parent(self):
+ self._refs[b"refs/heads/foo/bar"] = b"df6800012397fb85c56e7418dd4eb9405dee075c"
+ del self._refs[b"refs/heads/foo/bar"]
+ ref_file = os.path.join(
+ self._refs.path,
+ b"refs",
+ b"heads",
+ b"foo",
+ b"bar",
+ )
+ self.assertFalse(os.path.exists(ref_file))
+ ref_file = os.path.join(self._refs.path, b"refs", b"heads", b"foo")
+ self.assertFalse(os.path.exists(ref_file))
+ ref_file = os.path.join(self._refs.path, b"refs", b"heads")
+ self.assertTrue(os.path.exists(ref_file))
+ self._refs[b"refs/heads/foo"] = b"df6800012397fb85c56e7418dd4eb9405dee075c"
+
+ def test_read_ref(self):
+ self.assertEqual(b"ref: refs/heads/master", self._refs.read_ref(b"HEAD"))
+ self.assertEqual(
+ b"42d06bd4b77fed026b154d16493e5deab78f02ec",
+ self._refs.read_ref(b"refs/heads/packed"),
+ )
+ self.assertEqual(None, self._refs.read_ref(b"nonexistent"))
+
+ def test_read_loose_ref(self):
+ self._refs[b"refs/heads/foo"] = b"df6800012397fb85c56e7418dd4eb9405dee075c"
+
+ self.assertEqual(None, self._refs.read_ref(b"refs/heads/foo/bar"))
+
+ def test_non_ascii(self):
+ # Loose refs with non-ASCII names must round-trip through
+ # get_refs(). Skip if the filesystem encoding cannot hold them.
+ try:
+ encoded_ref = os.fsencode("refs/tags/schön")
+ except UnicodeEncodeError as exc:
+ raise SkipTest(
+ "filesystem encoding doesn't support special character"
+ ) from exc
+ # Create the loose ref by writing its ref file directly.
+ p = os.path.join(os.fsencode(self._repo.path), encoded_ref)
+ with open(p, "w") as f:
+ f.write("00" * 20)
+
+ expected_refs = dict(_TEST_REFS)
+ expected_refs[encoded_ref] = b"00" * 20
+ # refs/heads/loop is excluded from get_refs() output, as in the
+ # other tests in this module.
+ del expected_refs[b"refs/heads/loop"]
+
+ self.assertEqual(expected_refs, self._repo.get_refs())
+
+ def test_cyrillic(self):
+ # Ref names are arbitrary bytes, not necessarily valid in the
+ # filesystem encoding; regression test for
+ # https://github.com/dulwich/dulwich/issues/608.
+ if sys.platform in ("darwin", "win32"):
+ raise SkipTest("filesystem encoding doesn't support arbitrary bytes")
+ # reported in https://github.com/dulwich/dulwich/issues/608
+ name = b"\xcd\xee\xe2\xe0\xff\xe2\xe5\xf2\xea\xe01"
+ encoded_ref = b"refs/heads/" + name
+ # Create the loose ref by writing its ref file directly.
+ with open(os.path.join(os.fsencode(self._repo.path), encoded_ref), "w") as f:
+ f.write("00" * 20)
+
+ expected_refs = set(_TEST_REFS.keys())
+ expected_refs.add(encoded_ref)
+
+ self.assertEqual(expected_refs, set(self._repo.refs.allkeys()))
+ # subkeys() strips the "refs/" prefix from everything under it.
+ self.assertEqual(
+ {r[len(b"refs/") :] for r in expected_refs if r.startswith(b"refs/")},
+ set(self._repo.refs.subkeys(b"refs/")),
+ )
+ # get_refs() drops the refs/heads/loop entry but includes HEAD.
+ expected_refs.remove(b"refs/heads/loop")
+ expected_refs.add(b"HEAD")
+ self.assertEqual(expected_refs, set(self._repo.get_refs().keys()))
+
+
+# _TEST_REFS rendered in the "<sha>\t<refname>\n" format parsed by
+# InfoRefsContainer. The HEAD and refs/heads/loop entries from
+# _TEST_REFS are not listed (the tests below exclude them likewise).
+_TEST_REFS_SERIALIZED = (
+ b"42d06bd4b77fed026b154d16493e5deab78f02ec\t"
+ b"refs/heads/40-char-ref-aaaaaaaaaaaaaaaaaa\n"
+ b"42d06bd4b77fed026b154d16493e5deab78f02ec\trefs/heads/master\n"
+ b"42d06bd4b77fed026b154d16493e5deab78f02ec\trefs/heads/packed\n"
+ b"df6800012397fb85c56e7418dd4eb9405dee075c\trefs/tags/refs-0.1\n"
+ b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8\trefs/tags/refs-0.2\n"
+)
+
+
+class InfoRefsContainerTests(TestCase):
+ """Tests for the read-only InfoRefsContainer built from info/refs data."""
+
+ def test_invalid_refname(self):
+ # An entry with an unusual name (refs/stash) is still parsed and
+ # exposed by as_dict() rather than being filtered out.
+ text = _TEST_REFS_SERIALIZED + b"00" * 20 + b"\trefs/stash\n"
+ refs = InfoRefsContainer(BytesIO(text))
+ expected_refs = dict(_TEST_REFS)
+ # HEAD and refs/heads/loop are not part of the serialized data.
+ del expected_refs[b"HEAD"]
+ expected_refs[b"refs/stash"] = b"00" * 20
+ del expected_refs[b"refs/heads/loop"]
+ self.assertEqual(expected_refs, refs.as_dict())
+
+ def test_keys(self):
+ refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED))
+ actual_keys = set(refs.keys())
+ # keys() with no argument matches allkeys().
+ self.assertEqual(set(refs.allkeys()), actual_keys)
+ expected_refs = dict(_TEST_REFS)
+ del expected_refs[b"HEAD"]
+ del expected_refs[b"refs/heads/loop"]
+ self.assertEqual(set(expected_refs.keys()), actual_keys)
+
+ # keys(base) returns names relative to the base.
+ actual_keys = refs.keys(b"refs/heads")
+ actual_keys.discard(b"loop")
+ self.assertEqual(
+ [b"40-char-ref-aaaaaaaaaaaaaaaaaa", b"master", b"packed"],
+ sorted(actual_keys),
+ )
+ self.assertEqual([b"refs-0.1", b"refs-0.2"], sorted(refs.keys(b"refs/tags")))
+
+ def test_as_dict(self):
+ refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED))
+ # refs/heads/loop does not show up even if it exists
+ expected_refs = dict(_TEST_REFS)
+ del expected_refs[b"HEAD"]
+ del expected_refs[b"refs/heads/loop"]
+ self.assertEqual(expected_refs, refs.as_dict())
+
+ def test_contains(self):
+ refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED))
+ self.assertIn(b"refs/heads/master", refs)
+ self.assertNotIn(b"refs/heads/bar", refs)
+
+ def test_get_peeled(self):
+ refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED))
+ # refs/heads/loop does not show up even if it exists
+ # With no '^{}' data, get_peeled() returns the ref's own SHA.
+ self.assertEqual(
+ _TEST_REFS[b"refs/heads/master"],
+ refs.get_peeled(b"refs/heads/master"),
+ )
+
+
+class ParseSymrefValueTests(TestCase):
+ def test_valid(self):
+ self.assertEqual(b"refs/heads/foo", parse_symref_value(b"ref: refs/heads/foo"))
+
+ def test_invalid(self):
+ self.assertRaises(ValueError, parse_symref_value, b"foobar")
+
+
+class StripPeeledRefsTests(TestCase):
+ """Tests for strip_peeled_refs()."""
+
+ # Ref dict including the '^{}'-suffixed peeled-tag entries.
+ all_refs: ClassVar[Dict[bytes, bytes]] = {
+ b"refs/heads/master": b"8843d7f92416211de9ebb963ff4ce28125932878",
+ b"refs/heads/testing": b"186a005b134d8639a58b6731c7c1ea821a6eedba",
+ b"refs/tags/1.0.0": b"a93db4b0360cc635a2b93675010bac8d101f73f0",
+ b"refs/tags/1.0.0^{}": b"a93db4b0360cc635a2b93675010bac8d101f73f0",
+ b"refs/tags/2.0.0": b"0749936d0956c661ac8f8d3483774509c165f89e",
+ b"refs/tags/2.0.0^{}": b"0749936d0956c661ac8f8d3483774509c165f89e",
+ }
+ # The same table with the peeled ('^{}') entries removed.
+ non_peeled_refs: ClassVar[Dict[bytes, bytes]] = {
+ b"refs/heads/master": b"8843d7f92416211de9ebb963ff4ce28125932878",
+ b"refs/heads/testing": b"186a005b134d8639a58b6731c7c1ea821a6eedba",
+ b"refs/tags/1.0.0": b"a93db4b0360cc635a2b93675010bac8d101f73f0",
+ b"refs/tags/2.0.0": b"0749936d0956c661ac8f8d3483774509c165f89e",
+ }
+
+ def test_strip_peeled_refs(self):
+ # Simple check of two dicts
+ self.assertEqual(strip_peeled_refs(self.all_refs), self.non_peeled_refs)
blob - /dev/null
blob + 80605d1b70ff8bbce62d274533238b05e3b62a82 (mode 644)
--- /dev/null
+++ tests/test_repository.py
+# test_repository.py -- tests for repository.py
+# Copyright (C) 2007 James Westby <jw+debian@jameswestby.net>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for the repository."""
+
+import glob
+import locale
+import os
+import shutil
+import stat
+import sys
+import tempfile
+import warnings
+
+from dulwich import errors, objects, porcelain
+from dulwich.config import Config
+from dulwich.errors import NotGitRepository
+from dulwich.object_store import tree_lookup_path
+from dulwich.repo import (
+ InvalidUserIdentity,
+ MemoryRepo,
+ Repo,
+ UnsupportedExtension,
+ UnsupportedVersion,
+ check_user_identity,
+)
+
+from . import TestCase, skipIf
+from .utils import open_repo, setup_warning_catcher, tear_down_repo
+
+# A well-formed SHA that does not exist in any of the test repositories
+# (used e.g. by test_get_object_non_existant to provoke KeyError).
+missing_sha = b"b91fa4d900e17e99b433218e988c4eb4a3e9a097"
+
+
+class CreateRepositoryTests(TestCase):
+ """Tests for Repo.init/init_bare and MemoryRepo.init_bare."""
+
+ def assertFileContentsEqual(self, expected, repo, path):
+ # expected=None asserts that the named file does not exist.
+ f = repo.get_named_file(path)
+ if not f:
+ self.assertEqual(expected, None)
+ else:
+ with f:
+ self.assertEqual(expected, f.read())
+
+ def _check_repo_contents(self, repo, expect_bare):
+ # Verify the standard control files a fresh repo must contain.
+ self.assertEqual(expect_bare, repo.bare)
+ self.assertFileContentsEqual(b"Unnamed repository", repo, "description")
+ self.assertFileContentsEqual(b"", repo, os.path.join("info", "exclude"))
+ self.assertFileContentsEqual(None, repo, "nonexistent file")
+ # config must record core.bare matching expect_bare ...
+ barestr = b"bare = " + str(expect_bare).lower().encode("ascii")
+ with repo.get_named_file("config") as f:
+ config_text = f.read()
+ self.assertIn(barestr, config_text, "%r" % config_text)
+ # ... and core.filemode, which is false on Windows only.
+ expect_filemode = sys.platform != "win32"
+ barestr = b"filemode = " + str(expect_filemode).lower().encode("ascii")
+ with repo.get_named_file("config") as f:
+ config_text = f.read()
+ self.assertIn(barestr, config_text, "%r" % config_text)
+
+ if isinstance(repo, Repo):
+ # On-disk repos: check permission bits of the control files
+ # (0o666 when filemode is unsupported).
+ expected_mode = "0o100644" if expect_filemode else "0o100666"
+ expected = {
+ "HEAD": expected_mode,
+ "config": expected_mode,
+ "description": expected_mode,
+ }
+ actual = {
+ f[len(repo._controldir) + 1 :]: oct(os.stat(f).st_mode)
+ for f in glob.glob(os.path.join(repo._controldir, "*"))
+ if os.path.isfile(f)
+ }
+
+ self.assertEqual(expected, actual)
+
+ def test_create_memory(self):
+ repo = MemoryRepo.init_bare([], {})
+ self._check_repo_contents(repo, True)
+
+ def test_create_disk_bare(self):
+ tmp_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ repo = Repo.init_bare(tmp_dir)
+ # A bare repo's control dir is the repo dir itself.
+ self.assertEqual(tmp_dir, repo._controldir)
+ self._check_repo_contents(repo, True)
+
+ def test_create_disk_non_bare(self):
+ tmp_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ repo = Repo.init(tmp_dir)
+ # A non-bare repo keeps its control files under .git.
+ self.assertEqual(os.path.join(tmp_dir, ".git"), repo._controldir)
+ self._check_repo_contents(repo, False)
+
+ def test_create_disk_non_bare_mkdir(self):
+ # mkdir=True creates the (missing) target directory first.
+ tmp_dir = tempfile.mkdtemp()
+ target_dir = os.path.join(tmp_dir, "target")
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ repo = Repo.init(target_dir, mkdir=True)
+ self.assertEqual(os.path.join(target_dir, ".git"), repo._controldir)
+ self._check_repo_contents(repo, False)
+
+ def test_create_disk_bare_mkdir(self):
+ tmp_dir = tempfile.mkdtemp()
+ target_dir = os.path.join(tmp_dir, "target")
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ repo = Repo.init_bare(target_dir, mkdir=True)
+ self.assertEqual(target_dir, repo._controldir)
+ self._check_repo_contents(repo, True)
+
+
+class MemoryRepoTests(TestCase):
+ def test_set_description(self):
+ r = MemoryRepo.init_bare([], {})
+ description = b"Some description"
+ r.set_description(description)
+ self.assertEqual(description, r.get_description())
+
+ def test_pull_into(self):
+ r = MemoryRepo.init_bare([], {})
+ repo = open_repo("a.git")
+ self.addCleanup(tear_down_repo, repo)
+ repo.fetch(r)
+
+
+class RepositoryRootTests(TestCase):
+ def mkdtemp(self):
+ return tempfile.mkdtemp()
+
+ def open_repo(self, name):
+ temp_dir = self.mkdtemp()
+ repo = open_repo(name, temp_dir)
+ self.addCleanup(tear_down_repo, repo)
+ return repo
+
+ def test_simple_props(self):
+ r = self.open_repo("a.git")
+ self.assertEqual(r.controldir(), r.path)
+
+ def test_setitem(self):
+ r = self.open_repo("a.git")
+ r[b"refs/tags/foo"] = b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"
+ self.assertEqual(
+ b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", r[b"refs/tags/foo"].id
+ )
+
+ def test_getitem_unicode(self):
+ r = self.open_repo("a.git")
+
+ test_keys = [
+ (b"refs/heads/master", True),
+ (b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", True),
+ (b"11" * 19 + b"--", False),
+ ]
+
+ for k, contained in test_keys:
+ self.assertEqual(k in r, contained)
+
+ # Avoid deprecation warning under Py3.2+
+ if getattr(self, "assertRaisesRegex", None):
+ assertRaisesRegexp = self.assertRaisesRegex
+ else:
+ assertRaisesRegexp = self.assertRaisesRegexp
+ for k, _ in test_keys:
+ assertRaisesRegexp(
+ TypeError,
+ "'name' must be bytestring, not int",
+ r.__getitem__,
+ 12,
+ )
+
+ def test_delitem(self):
+ r = self.open_repo("a.git")
+
+ del r[b"refs/heads/master"]
+ self.assertRaises(KeyError, lambda: r[b"refs/heads/master"])
+
+ del r[b"HEAD"]
+ self.assertRaises(KeyError, lambda: r[b"HEAD"])
+
+ self.assertRaises(ValueError, r.__delitem__, b"notrefs/foo")
+
+ def test_get_refs(self):
+ r = self.open_repo("a.git")
+ self.assertEqual(
+ {
+ b"HEAD": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
+ b"refs/heads/master": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
+ b"refs/tags/mytag": b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a",
+ b"refs/tags/mytag-packed": b"b0931cadc54336e78a1d980420e3268903b57a50",
+ },
+ r.get_refs(),
+ )
+
+ def test_head(self):
+ r = self.open_repo("a.git")
+ self.assertEqual(r.head(), b"a90fa2d900a17e99b433217e988c4eb4a2e9a097")
+
+ def test_get_object(self):
+ r = self.open_repo("a.git")
+ obj = r.get_object(r.head())
+ self.assertEqual(obj.type_name, b"commit")
+
+ def test_get_object_non_existant(self):
+ r = self.open_repo("a.git")
+ self.assertRaises(KeyError, r.get_object, missing_sha)
+
+ def test_contains_object(self):
+ r = self.open_repo("a.git")
+ self.assertIn(r.head(), r)
+ self.assertNotIn(b"z" * 40, r)
+
+ def test_contains_ref(self):
+ r = self.open_repo("a.git")
+ self.assertIn(b"HEAD", r)
+
+ def test_get_no_description(self):
+ r = self.open_repo("a.git")
+ self.assertIs(None, r.get_description())
+
+ def test_get_description(self):
+ r = self.open_repo("a.git")
+ with open(os.path.join(r.path, "description"), "wb") as f:
+ f.write(b"Some description")
+ self.assertEqual(b"Some description", r.get_description())
+
+ def test_set_description(self):
+ r = self.open_repo("a.git")
+ description = b"Some description"
+ r.set_description(description)
+ self.assertEqual(description, r.get_description())
+
+ def test_contains_missing(self):
+ r = self.open_repo("a.git")
+ self.assertNotIn(b"bar", r)
+
+ def test_get_peeled(self):
+ # unpacked ref
+ r = self.open_repo("a.git")
+ tag_sha = b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a"
+ self.assertNotEqual(r[tag_sha].sha().hexdigest(), r.head())
+ self.assertEqual(r.get_peeled(b"refs/tags/mytag"), r.head())
+
+ # packed ref with cached peeled value
+ packed_tag_sha = b"b0931cadc54336e78a1d980420e3268903b57a50"
+ parent_sha = r[r.head()].parents[0]
+ self.assertNotEqual(r[packed_tag_sha].sha().hexdigest(), parent_sha)
+ self.assertEqual(r.get_peeled(b"refs/tags/mytag-packed"), parent_sha)
+
+ # TODO: add more corner cases to test repo
+
+ def test_get_peeled_not_tag(self):
+ r = self.open_repo("a.git")
+ self.assertEqual(r.get_peeled(b"HEAD"), r.head())
+
+ def test_get_parents(self):
+ r = self.open_repo("a.git")
+ self.assertEqual(
+ [b"2a72d929692c41d8554c07f6301757ba18a65d91"],
+ r.get_parents(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"),
+ )
+ r.update_shallow([b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"], None)
+ self.assertEqual([], r.get_parents(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"))
+
+ def test_get_walker(self):
+ r = self.open_repo("a.git")
+ # include defaults to [r.head()]
+ self.assertEqual(
+ [e.commit.id for e in r.get_walker()],
+ [r.head(), b"2a72d929692c41d8554c07f6301757ba18a65d91"],
+ )
+ self.assertEqual(
+ [
+ e.commit.id
+ for e in r.get_walker([b"2a72d929692c41d8554c07f6301757ba18a65d91"])
+ ],
+ [b"2a72d929692c41d8554c07f6301757ba18a65d91"],
+ )
+ self.assertEqual(
+ [
+ e.commit.id
+ for e in r.get_walker(b"2a72d929692c41d8554c07f6301757ba18a65d91")
+ ],
+ [b"2a72d929692c41d8554c07f6301757ba18a65d91"],
+ )
+
+ def assertFilesystemHidden(self, path):
+ if sys.platform != "win32":
+ return
+ import ctypes
+ from ctypes.wintypes import DWORD, LPCWSTR
+
+ GetFileAttributesW = ctypes.WINFUNCTYPE(DWORD, LPCWSTR)(
+ ("GetFileAttributesW", ctypes.windll.kernel32)
+ )
+ self.assertTrue(2 & GetFileAttributesW(path))
+
+ def test_init_existing(self):
+ tmp_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ t = Repo.init(tmp_dir)
+ self.addCleanup(t.close)
+ self.assertEqual(os.listdir(tmp_dir), [".git"])
+ self.assertFilesystemHidden(os.path.join(tmp_dir, ".git"))
+
+ def test_init_mkdir(self):
+ tmp_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ repo_dir = os.path.join(tmp_dir, "a-repo")
+
+ t = Repo.init(repo_dir, mkdir=True)
+ self.addCleanup(t.close)
+ self.assertEqual(os.listdir(repo_dir), [".git"])
+ self.assertFilesystemHidden(os.path.join(repo_dir, ".git"))
+
+ def test_init_mkdir_unicode(self):
+ repo_name = "\xa7"
+ try:
+ os.fsencode(repo_name)
+ except UnicodeEncodeError:
+ self.skipTest("filesystem lacks unicode support")
+ tmp_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ repo_dir = os.path.join(tmp_dir, repo_name)
+
+ t = Repo.init(repo_dir, mkdir=True)
+ self.addCleanup(t.close)
+ self.assertEqual(os.listdir(repo_dir), [".git"])
+ self.assertFilesystemHidden(os.path.join(repo_dir, ".git"))
+
+ @skipIf(sys.platform == "win32", "fails on Windows")
+ def test_fetch(self):
+ r = self.open_repo("a.git")
+ tmp_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ t = Repo.init(tmp_dir)
+ self.addCleanup(t.close)
+ r.fetch(t)
+ self.assertIn(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", t)
+ self.assertIn(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", t)
+ self.assertIn(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", t)
+ self.assertIn(b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a", t)
+ self.assertIn(b"b0931cadc54336e78a1d980420e3268903b57a50", t)
+
+ @skipIf(sys.platform == "win32", "fails on Windows")
+ def test_fetch_ignores_missing_refs(self):
+ r = self.open_repo("a.git")
+ missing = b"1234566789123456789123567891234657373833"
+ r.refs[b"refs/heads/blah"] = missing
+ tmp_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ t = Repo.init(tmp_dir)
+ self.addCleanup(t.close)
+ r.fetch(t)
+ self.assertIn(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", t)
+ self.assertIn(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", t)
+ self.assertIn(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", t)
+ self.assertIn(b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a", t)
+ self.assertIn(b"b0931cadc54336e78a1d980420e3268903b57a50", t)
+ self.assertNotIn(missing, t)
+
+ def test_clone(self):
+ r = self.open_repo("a.git")
+ tmp_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ with r.clone(tmp_dir, mkdir=False) as t:
+ self.assertEqual(
+ {
+ b"HEAD": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
+ b"refs/remotes/origin/master": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
+ b"refs/remotes/origin/HEAD": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
+ b"refs/heads/master": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
+ b"refs/tags/mytag": b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a",
+ b"refs/tags/mytag-packed": b"b0931cadc54336e78a1d980420e3268903b57a50",
+ },
+ t.refs.as_dict(),
+ )
+ shas = [e.commit.id for e in r.get_walker()]
+ self.assertEqual(
+ shas, [t.head(), b"2a72d929692c41d8554c07f6301757ba18a65d91"]
+ )
+ c = t.get_config()
+ encoded_path = r.path
+ if not isinstance(encoded_path, bytes):
+ encoded_path = os.fsencode(encoded_path)
+ self.assertEqual(encoded_path, c.get((b"remote", b"origin"), b"url"))
+ self.assertEqual(
+ b"+refs/heads/*:refs/remotes/origin/*",
+ c.get((b"remote", b"origin"), b"fetch"),
+ )
+
+ def test_clone_no_head(self):
+ temp_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, temp_dir)
+ repo_dir = os.path.join(
+ os.path.dirname(__file__), "..", "testdata", "repos"
+ )
+ dest_dir = os.path.join(temp_dir, "a.git")
+ shutil.copytree(os.path.join(repo_dir, "a.git"), dest_dir, symlinks=True)
+ r = Repo(dest_dir)
+ self.addCleanup(r.close)
+ del r.refs[b"refs/heads/master"]
+ del r.refs[b"HEAD"]
+ t = r.clone(os.path.join(temp_dir, "b.git"), mkdir=True)
+ self.addCleanup(t.close)
+ self.assertEqual(
+ {
+ b"refs/tags/mytag": b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a",
+ b"refs/tags/mytag-packed": b"b0931cadc54336e78a1d980420e3268903b57a50",
+ },
+ t.refs.as_dict(),
+ )
+
+ def test_clone_empty(self):
+ """Test clone() doesn't crash if HEAD points to a non-existing ref.
+
+ This simulates cloning server-side bare repository either when it is
+ still empty or if user renames master branch and pushes private repo
+ to the server.
+ Non-bare repo HEAD always points to an existing ref.
+ """
+ r = self.open_repo("empty.git")
+ tmp_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ r.clone(tmp_dir, mkdir=False, bare=True)
+
+ def test_reset_index_symlink_enabled(self):
+ if sys.platform == "win32":
+ self.skipTest("symlinks are not supported on Windows")
+ tmp_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+
+ o = Repo.init(os.path.join(tmp_dir, "s"), mkdir=True)
+ os.symlink("foo", os.path.join(tmp_dir, "s", "bar"))
+ o.stage("bar")
+ o.do_commit(b"add symlink")
+
+ t = o.clone(os.path.join(tmp_dir, "t"), symlinks=True)
+ o.close()
+ bar_path = os.path.join(tmp_dir, "t", "bar")
+ if sys.platform == "win32":
+ with open(bar_path) as f:
+ self.assertEqual("foo", f.read())
+ else:
+ self.assertEqual("foo", os.readlink(bar_path))
+ t.close()
+
+ def test_reset_index_symlink_disabled(self):
+ tmp_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+
+ o = Repo.init(os.path.join(tmp_dir, "s"), mkdir=True)
+ o.close()
+ os.symlink("foo", os.path.join(tmp_dir, "s", "bar"))
+ o.stage("bar")
+ o.do_commit(b"add symlink")
+
+ t = o.clone(os.path.join(tmp_dir, "t"), symlinks=False)
+ with open(os.path.join(tmp_dir, "t", "bar")) as f:
+ self.assertEqual("foo", f.read())
+
+ t.close()
+
+ def test_clone_bare(self):
+ r = self.open_repo("a.git")
+ tmp_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ t = r.clone(tmp_dir, mkdir=False)
+ t.close()
+
+ def test_clone_checkout_and_bare(self):
+ r = self.open_repo("a.git")
+ tmp_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ self.assertRaises(
+ ValueError, r.clone, tmp_dir, mkdir=False, checkout=True, bare=True
+ )
+
+ def test_clone_branch(self):
+ r = self.open_repo("a.git")
+ r.refs[b"refs/heads/mybranch"] = b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a"
+ tmp_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ with r.clone(tmp_dir, mkdir=False, branch=b"mybranch") as t:
+ # HEAD should point to specified branch and not origin HEAD
+ chain, sha = t.refs.follow(b"HEAD")
+ self.assertEqual(chain[-1], b"refs/heads/mybranch")
+ self.assertEqual(sha, b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a")
+ self.assertEqual(
+ t.refs[b"refs/remotes/origin/HEAD"],
+ b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
+ )
+
+ def test_clone_tag(self):
+ r = self.open_repo("a.git")
+ tmp_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ with r.clone(tmp_dir, mkdir=False, branch=b"mytag") as t:
+ # HEAD should be detached (and not a symbolic ref) at tag
+ self.assertEqual(
+ t.refs.read_ref(b"HEAD"),
+ b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a",
+ )
+ self.assertEqual(
+ t.refs[b"refs/remotes/origin/HEAD"],
+ b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
+ )
+
+ def test_clone_invalid_branch(self):
+ r = self.open_repo("a.git")
+ tmp_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ self.assertRaises(
+ ValueError,
+ r.clone,
+ tmp_dir,
+ mkdir=False,
+ branch=b"mybranch",
+ )
+
+ def test_merge_history(self):
+ r = self.open_repo("simple_merge.git")
+ shas = [e.commit.id for e in r.get_walker()]
+ self.assertEqual(
+ shas,
+ [
+ b"5dac377bdded4c9aeb8dff595f0faeebcc8498cc",
+ b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd",
+ b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6",
+ b"60dacdc733de308bb77bb76ce0fb0f9b44c9769e",
+ b"0d89f20333fbb1d2f3a94da77f4981373d8f4310",
+ ],
+ )
+
+ def test_out_of_order_merge(self):
+ """Test that revision history is ordered by date, not parent order."""
+ r = self.open_repo("ooo_merge.git")
+ shas = [e.commit.id for e in r.get_walker()]
+ self.assertEqual(
+ shas,
+ [
+ b"7601d7f6231db6a57f7bbb79ee52e4d462fd44d1",
+ b"f507291b64138b875c28e03469025b1ea20bc614",
+ b"fb5b0425c7ce46959bec94d54b9a157645e114f5",
+ b"f9e39b120c68182a4ba35349f832d0e4e61f485c",
+ ],
+ )
+
+ def test_get_tags_empty(self):
+ r = self.open_repo("ooo_merge.git")
+ self.assertEqual({}, r.refs.as_dict(b"refs/tags"))
+
+ def test_get_config(self):
+ r = self.open_repo("ooo_merge.git")
+ self.assertIsInstance(r.get_config(), Config)
+
+ def test_get_config_stack(self):
+ r = self.open_repo("ooo_merge.git")
+ self.assertIsInstance(r.get_config_stack(), Config)
+
+ def test_common_revisions(self):
+ """This test demonstrates that ``find_common_revisions()`` actually
+ returns common heads, not revisions; dulwich already uses
+ ``find_common_revisions()`` in such a manner (see
+ ``Repo.find_objects()``).
+ """
+ expected_shas = {b"60dacdc733de308bb77bb76ce0fb0f9b44c9769e"}
+
+ # Source for objects.
+ r_base = self.open_repo("simple_merge.git")
+
+ # Re-create each-side of the merge in simple_merge.git.
+ #
+ # Since the trees and blobs are missing, the repository created is
+ # corrupted, but we're only checking for commits for the purpose of
+ # this test, so it's immaterial.
+ r1_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, r1_dir)
+ r1_commits = [
+ b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd", # HEAD
+ b"60dacdc733de308bb77bb76ce0fb0f9b44c9769e",
+ b"0d89f20333fbb1d2f3a94da77f4981373d8f4310",
+ ]
+
+ r2_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, r2_dir)
+ r2_commits = [
+ b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6", # HEAD
+ b"60dacdc733de308bb77bb76ce0fb0f9b44c9769e",
+ b"0d89f20333fbb1d2f3a94da77f4981373d8f4310",
+ ]
+
+ r1 = Repo.init_bare(r1_dir)
+ for c in r1_commits:
+ r1.object_store.add_object(r_base.get_object(c))
+ r1.refs[b"HEAD"] = r1_commits[0]
+
+ r2 = Repo.init_bare(r2_dir)
+ for c in r2_commits:
+ r2.object_store.add_object(r_base.get_object(c))
+ r2.refs[b"HEAD"] = r2_commits[0]
+
+ # Finally, the 'real' testing!
+ shas = r2.object_store.find_common_revisions(r1.get_graph_walker())
+ self.assertEqual(set(shas), expected_shas)
+
+ shas = r1.object_store.find_common_revisions(r2.get_graph_walker())
+ self.assertEqual(set(shas), expected_shas)
+
+ def test_shell_hook_pre_commit(self):
+ if os.name != "posix":
+ self.skipTest("shell hook tests requires POSIX shell")
+
+ pre_commit_fail = """#!/bin/sh
+exit 1
+"""
+
+ pre_commit_success = """#!/bin/sh
+exit 0
+"""
+
+ repo_dir = os.path.join(self.mkdtemp())
+ self.addCleanup(shutil.rmtree, repo_dir)
+ r = Repo.init(repo_dir)
+ self.addCleanup(r.close)
+
+ pre_commit = os.path.join(r.controldir(), "hooks", "pre-commit")
+
+ with open(pre_commit, "w") as f:
+ f.write(pre_commit_fail)
+ os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+
+ self.assertRaises(
+ errors.CommitError,
+ r.do_commit,
+ b"failed commit",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12345,
+ commit_timezone=0,
+ author_timestamp=12345,
+ author_timezone=0,
+ )
+
+ with open(pre_commit, "w") as f:
+ f.write(pre_commit_success)
+ os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+
+ commit_sha = r.do_commit(
+ b"empty commit",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12395,
+ commit_timezone=0,
+ author_timestamp=12395,
+ author_timezone=0,
+ )
+ self.assertEqual([], r[commit_sha].parents)
+
+ def test_shell_hook_commit_msg(self):
+ if os.name != "posix":
+ self.skipTest("shell hook tests requires POSIX shell")
+
+ commit_msg_fail = """#!/bin/sh
+exit 1
+"""
+
+ commit_msg_success = """#!/bin/sh
+exit 0
+"""
+
+ repo_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, repo_dir)
+ r = Repo.init(repo_dir)
+ self.addCleanup(r.close)
+
+ commit_msg = os.path.join(r.controldir(), "hooks", "commit-msg")
+
+ with open(commit_msg, "w") as f:
+ f.write(commit_msg_fail)
+ os.chmod(commit_msg, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+
+ self.assertRaises(
+ errors.CommitError,
+ r.do_commit,
+ b"failed commit",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12345,
+ commit_timezone=0,
+ author_timestamp=12345,
+ author_timezone=0,
+ )
+
+ with open(commit_msg, "w") as f:
+ f.write(commit_msg_success)
+ os.chmod(commit_msg, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+
+ commit_sha = r.do_commit(
+ b"empty commit",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12395,
+ commit_timezone=0,
+ author_timestamp=12395,
+ author_timezone=0,
+ )
+ self.assertEqual([], r[commit_sha].parents)
+
+ def test_shell_hook_pre_commit_add_files(self):
+ if os.name != "posix":
+ self.skipTest("shell hook tests requires POSIX shell")
+
+ pre_commit_contents = """#!{executable}
+import sys
+sys.path.extend({path!r})
+from dulwich.repo import Repo
+
+with open('foo', 'w') as f:
+ f.write('newfile')
+
+r = Repo('.')
+r.stage(['foo'])
+""".format(
+ executable=sys.executable,
+ path=[os.path.join(os.path.dirname(__file__), "..", ".."), *sys.path],
+ )
+
+ repo_dir = os.path.join(self.mkdtemp())
+ self.addCleanup(shutil.rmtree, repo_dir)
+ r = Repo.init(repo_dir)
+ self.addCleanup(r.close)
+
+ with open(os.path.join(repo_dir, "blah"), "w") as f:
+ f.write("blah")
+
+ r.stage(["blah"])
+
+ pre_commit = os.path.join(r.controldir(), "hooks", "pre-commit")
+
+ with open(pre_commit, "w") as f:
+ f.write(pre_commit_contents)
+ os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+
+ commit_sha = r.do_commit(
+ b"new commit",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12395,
+ commit_timezone=0,
+ author_timestamp=12395,
+ author_timezone=0,
+ )
+ self.assertEqual([], r[commit_sha].parents)
+
+ tree = r[r[commit_sha].tree]
+ self.assertEqual({b"blah", b"foo"}, set(tree))
+
+ def test_shell_hook_post_commit(self):
+ if os.name != "posix":
+ self.skipTest("shell hook tests requires POSIX shell")
+
+ repo_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, repo_dir)
+
+ r = Repo.init(repo_dir)
+ self.addCleanup(r.close)
+
+ (fd, path) = tempfile.mkstemp(dir=repo_dir)
+ os.close(fd)
+ post_commit_msg = (
+ """#!/bin/sh
+rm """
+ + path
+ + """
+"""
+ )
+
+ root_sha = r.do_commit(
+ b"empty commit",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12345,
+ commit_timezone=0,
+ author_timestamp=12345,
+ author_timezone=0,
+ )
+ self.assertEqual([], r[root_sha].parents)
+
+ post_commit = os.path.join(r.controldir(), "hooks", "post-commit")
+
+ with open(post_commit, "wb") as f:
+ f.write(post_commit_msg.encode(locale.getpreferredencoding()))
+ os.chmod(post_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+
+ commit_sha = r.do_commit(
+ b"empty commit",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12345,
+ commit_timezone=0,
+ author_timestamp=12345,
+ author_timezone=0,
+ )
+ self.assertEqual([root_sha], r[commit_sha].parents)
+
+ self.assertFalse(os.path.exists(path))
+
+ post_commit_msg_fail = """#!/bin/sh
+exit 1
+"""
+ with open(post_commit, "w") as f:
+ f.write(post_commit_msg_fail)
+ os.chmod(post_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+
+ warnings.simplefilter("always", UserWarning)
+ self.addCleanup(warnings.resetwarnings)
+ warnings_list, restore_warnings = setup_warning_catcher()
+ self.addCleanup(restore_warnings)
+
+ commit_sha2 = r.do_commit(
+ b"empty commit",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12345,
+ commit_timezone=0,
+ author_timestamp=12345,
+ author_timezone=0,
+ )
+ expected_warning = UserWarning(
+ "post-commit hook failed: Hook post-commit exited with "
+ "non-zero status 1",
+ )
+ for w in warnings_list:
+ if type(w) is type(expected_warning) and w.args == expected_warning.args:
+ break
+ else:
+ raise AssertionError(
+ f"Expected warning {expected_warning!r} not in {warnings_list!r}"
+ )
+ self.assertEqual([commit_sha], r[commit_sha2].parents)
+
+ def test_as_dict(self):
+ def check(repo):
+ self.assertEqual(
+ repo.refs.subkeys(b"refs/tags"),
+ repo.refs.subkeys(b"refs/tags/"),
+ )
+ self.assertEqual(
+ repo.refs.as_dict(b"refs/tags"),
+ repo.refs.as_dict(b"refs/tags/"),
+ )
+ self.assertEqual(
+ repo.refs.as_dict(b"refs/heads"),
+ repo.refs.as_dict(b"refs/heads/"),
+ )
+
+ bare = self.open_repo("a.git")
+ tmp_dir = self.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ with bare.clone(tmp_dir, mkdir=False) as nonbare:
+ check(nonbare)
+ check(bare)
+
+ def test_working_tree(self):
+ temp_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, temp_dir)
+ worktree_temp_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, worktree_temp_dir)
+ r = Repo.init(temp_dir)
+ self.addCleanup(r.close)
+ root_sha = r.do_commit(
+ b"empty commit",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12345,
+ commit_timezone=0,
+ author_timestamp=12345,
+ author_timezone=0,
+ )
+ r.refs[b"refs/heads/master"] = root_sha
+ w = Repo._init_new_working_directory(worktree_temp_dir, r)
+ self.addCleanup(w.close)
+ new_sha = w.do_commit(
+ b"new commit",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12345,
+ commit_timezone=0,
+ author_timestamp=12345,
+ author_timezone=0,
+ )
+ w.refs[b"HEAD"] = new_sha
+ self.assertEqual(
+ os.path.abspath(r.controldir()), os.path.abspath(w.commondir())
+ )
+ self.assertEqual(r.refs.keys(), w.refs.keys())
+ self.assertNotEqual(r.head(), w.head())
+
+
+class BuildRepoRootTests(TestCase):
+ """Tests that build on-disk repos from scratch.
+
+ Repos live in a temp dir and are torn down after each test. They start with
+    a single commit in master with a single file named 'a'.
+ """
+
+ def get_repo_dir(self):
+ return os.path.join(tempfile.mkdtemp(), "test")
+
+ def setUp(self):
+ super().setUp()
+ self._repo_dir = self.get_repo_dir()
+ os.makedirs(self._repo_dir)
+ r = self._repo = Repo.init(self._repo_dir)
+ self.addCleanup(tear_down_repo, r)
+ self.assertFalse(r.bare)
+ self.assertEqual(b"ref: refs/heads/master", r.refs.read_ref(b"HEAD"))
+ self.assertRaises(KeyError, lambda: r.refs[b"refs/heads/master"])
+
+ with open(os.path.join(r.path, "a"), "wb") as f:
+ f.write(b"file contents")
+ r.stage(["a"])
+ commit_sha = r.do_commit(
+ b"msg",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12345,
+ commit_timezone=0,
+ author_timestamp=12345,
+ author_timezone=0,
+ )
+ self.assertEqual([], r[commit_sha].parents)
+ self._root_commit = commit_sha
+
+ def test_get_shallow(self):
+ self.assertEqual(set(), self._repo.get_shallow())
+ with open(os.path.join(self._repo.path, ".git", "shallow"), "wb") as f:
+ f.write(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097\n")
+ self.assertEqual(
+ {b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"},
+ self._repo.get_shallow(),
+ )
+
+ def test_update_shallow(self):
+ self._repo.update_shallow(None, None) # no op
+ self.assertEqual(set(), self._repo.get_shallow())
+ self._repo.update_shallow([b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"], None)
+ self.assertEqual(
+ {b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"},
+ self._repo.get_shallow(),
+ )
+ self._repo.update_shallow(
+ [b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"],
+ [b"f9e39b120c68182a4ba35349f832d0e4e61f485c"],
+ )
+ self.assertEqual(
+ {b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"},
+ self._repo.get_shallow(),
+ )
+ self._repo.update_shallow(None, [b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"])
+ self.assertEqual(set(), self._repo.get_shallow())
+ self.assertEqual(
+ False,
+ os.path.exists(os.path.join(self._repo.controldir(), "shallow")),
+ )
+
+ def test_build_repo(self):
+ r = self._repo
+ self.assertEqual(b"ref: refs/heads/master", r.refs.read_ref(b"HEAD"))
+ self.assertEqual(self._root_commit, r.refs[b"refs/heads/master"])
+ expected_blob = objects.Blob.from_string(b"file contents")
+ self.assertEqual(expected_blob.data, r[expected_blob.id].data)
+ actual_commit = r[self._root_commit]
+ self.assertEqual(b"msg", actual_commit.message)
+
+ def test_commit_modified(self):
+ r = self._repo
+ with open(os.path.join(r.path, "a"), "wb") as f:
+ f.write(b"new contents")
+ r.stage(["a"])
+ commit_sha = r.do_commit(
+ b"modified a",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12395,
+ commit_timezone=0,
+ author_timestamp=12395,
+ author_timezone=0,
+ )
+ self.assertEqual([self._root_commit], r[commit_sha].parents)
+ a_mode, a_id = tree_lookup_path(r.get_object, r[commit_sha].tree, b"a")
+ self.assertEqual(stat.S_IFREG | 0o644, a_mode)
+ self.assertEqual(b"new contents", r[a_id].data)
+
+ @skipIf(not getattr(os, "symlink", None), "Requires symlink support")
+ def test_commit_symlink(self):
+ r = self._repo
+ os.symlink("a", os.path.join(r.path, "b"))
+ r.stage(["a", "b"])
+ commit_sha = r.do_commit(
+ b"Symlink b",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12395,
+ commit_timezone=0,
+ author_timestamp=12395,
+ author_timezone=0,
+ )
+ self.assertEqual([self._root_commit], r[commit_sha].parents)
+ b_mode, b_id = tree_lookup_path(r.get_object, r[commit_sha].tree, b"b")
+ self.assertTrue(stat.S_ISLNK(b_mode))
+ self.assertEqual(b"a", r[b_id].data)
+
+ def test_commit_merge_heads_file(self):
+ tmp_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp_dir)
+ r = Repo.init(tmp_dir)
+ with open(os.path.join(r.path, "a"), "w") as f:
+ f.write("initial text")
+ c1 = r.do_commit(
+ b"initial commit",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12395,
+ commit_timezone=0,
+ author_timestamp=12395,
+ author_timezone=0,
+ )
+ with open(os.path.join(r.path, "a"), "w") as f:
+ f.write("merged text")
+ with open(os.path.join(r.path, ".git", "MERGE_HEAD"), "w") as f:
+ f.write("c27a2d21dd136312d7fa9e8baabb82561a1727d0\n")
+ r.stage(["a"])
+ commit_sha = r.do_commit(
+ b"deleted a",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12395,
+ commit_timezone=0,
+ author_timestamp=12395,
+ author_timezone=0,
+ )
+ self.assertEqual(
+ [c1, b"c27a2d21dd136312d7fa9e8baabb82561a1727d0"],
+ r[commit_sha].parents,
+ )
+
+ def test_commit_deleted(self):
+ r = self._repo
+ os.remove(os.path.join(r.path, "a"))
+ r.stage(["a"])
+ commit_sha = r.do_commit(
+ b"deleted a",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12395,
+ commit_timezone=0,
+ author_timestamp=12395,
+ author_timezone=0,
+ )
+ self.assertEqual([self._root_commit], r[commit_sha].parents)
+ self.assertEqual([], list(r.open_index()))
+ tree = r[r[commit_sha].tree]
+ self.assertEqual([], list(tree.iteritems()))
+
+ def test_commit_follows(self):
+ r = self._repo
+ r.refs.set_symbolic_ref(b"HEAD", b"refs/heads/bla")
+ commit_sha = r.do_commit(
+ b"commit with strange character",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12395,
+ commit_timezone=0,
+ author_timestamp=12395,
+ author_timezone=0,
+ ref=b"HEAD",
+ )
+ self.assertEqual(commit_sha, r[b"refs/heads/bla"].id)
+
+ def test_commit_encoding(self):
+ r = self._repo
+ commit_sha = r.do_commit(
+ b"commit with strange character \xee",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12395,
+ commit_timezone=0,
+ author_timestamp=12395,
+ author_timezone=0,
+ encoding=b"iso8859-1",
+ )
+ self.assertEqual(b"iso8859-1", r[commit_sha].encoding)
+
+ def test_compression_level(self):
+ r = self._repo
+ c = r.get_config()
+ c.set(("core",), "compression", "3")
+ c.set(("core",), "looseCompression", "4")
+ c.write_to_path()
+ r = Repo(self._repo_dir)
+ self.assertEqual(r.object_store.loose_compression_level, 4)
+
+ def test_repositoryformatversion_unsupported(self):
+ r = self._repo
+ c = r.get_config()
+ c.set(("core",), "repositoryformatversion", "2")
+ c.write_to_path()
+ self.assertRaises(UnsupportedVersion, Repo, self._repo_dir)
+
+ def test_repositoryformatversion_1(self):
+ r = self._repo
+ c = r.get_config()
+ c.set(("core",), "repositoryformatversion", "1")
+ c.write_to_path()
+ Repo(self._repo_dir)
+
+ def test_worktreeconfig_extension(self):
+ r = self._repo
+ c = r.get_config()
+ c.set(("core",), "repositoryformatversion", "1")
+ c.set(("extensions",), "worktreeconfig", True)
+ c.write_to_path()
+ c = r.get_worktree_config()
+ c.set(("user",), "repositoryformatversion", "1")
+ c.set((b"user",), b"name", b"Jelmer")
+ c.write_to_path()
+ cs = r.get_config_stack()
+ self.assertEqual(cs.get(("user",), "name"), b"Jelmer")
+
+ def test_repositoryformatversion_1_extension(self):
+ r = self._repo
+ c = r.get_config()
+ c.set(("core",), "repositoryformatversion", "1")
+ c.set(("extensions",), "unknownextension", True)
+ c.write_to_path()
+ self.assertRaises(UnsupportedExtension, Repo, self._repo_dir)
+
+ def test_commit_encoding_from_config(self):
+ r = self._repo
+ c = r.get_config()
+ c.set(("i18n",), "commitEncoding", "iso8859-1")
+ c.write_to_path()
+ commit_sha = r.do_commit(
+ b"commit with strange character \xee",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12395,
+ commit_timezone=0,
+ author_timestamp=12395,
+ author_timezone=0,
+ )
+ self.assertEqual(b"iso8859-1", r[commit_sha].encoding)
+
+ def test_commit_config_identity(self):
+        # commit falls back to the user's identity if it wasn't specified
+ r = self._repo
+ c = r.get_config()
+ c.set((b"user",), b"name", b"Jelmer")
+ c.set((b"user",), b"email", b"jelmer@apache.org")
+ c.write_to_path()
+ commit_sha = r.do_commit(b"message")
+ self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].author)
+ self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].committer)
+
+ def test_commit_config_identity_strips_than(self):
+        # commit falls back to the user's identity if it wasn't specified,
+ # and strips superfluous <>
+ r = self._repo
+ c = r.get_config()
+ c.set((b"user",), b"name", b"Jelmer")
+ c.set((b"user",), b"email", b"<jelmer@apache.org>")
+ c.write_to_path()
+ commit_sha = r.do_commit(b"message")
+ self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].author)
+ self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].committer)
+
+ def test_commit_config_identity_in_memoryrepo(self):
+        # commit falls back to the user's identity if it wasn't specified
+ r = MemoryRepo.init_bare([], {})
+ c = r.get_config()
+ c.set((b"user",), b"name", b"Jelmer")
+ c.set((b"user",), b"email", b"jelmer@apache.org")
+
+ commit_sha = r.do_commit(b"message", tree=objects.Tree().id)
+ self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].author)
+ self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].committer)
+
+ def test_commit_config_identity_from_env(self):
+        # commit falls back to the user's identity if it wasn't specified
+ self.overrideEnv("GIT_COMMITTER_NAME", "joe")
+ self.overrideEnv("GIT_COMMITTER_EMAIL", "joe@example.com")
+ r = self._repo
+ c = r.get_config()
+ c.set((b"user",), b"name", b"Jelmer")
+ c.set((b"user",), b"email", b"jelmer@apache.org")
+ c.write_to_path()
+ commit_sha = r.do_commit(b"message")
+ self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].author)
+ self.assertEqual(b"joe <joe@example.com>", r[commit_sha].committer)
+
+ def test_commit_fail_ref(self):
+ r = self._repo
+
+ def set_if_equals(name, old_ref, new_ref, **kwargs):
+ return False
+
+ r.refs.set_if_equals = set_if_equals
+
+ def add_if_new(name, new_ref, **kwargs):
+ self.fail("Unexpected call to add_if_new")
+
+ r.refs.add_if_new = add_if_new
+
+ old_shas = set(r.object_store)
+ self.assertRaises(
+ errors.CommitError,
+ r.do_commit,
+ b"failed commit",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12345,
+ commit_timezone=0,
+ author_timestamp=12345,
+ author_timezone=0,
+ )
+ new_shas = set(r.object_store) - old_shas
+ self.assertEqual(1, len(new_shas))
+ # Check that the new commit (now garbage) was added.
+ new_commit = r[new_shas.pop()]
+ self.assertEqual(r[self._root_commit].tree, new_commit.tree)
+ self.assertEqual(b"failed commit", new_commit.message)
+
+ def test_commit_branch(self):
+ r = self._repo
+
+ commit_sha = r.do_commit(
+ b"commit to branch",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12395,
+ commit_timezone=0,
+ author_timestamp=12395,
+ author_timezone=0,
+ ref=b"refs/heads/new_branch",
+ )
+ self.assertEqual(self._root_commit, r[b"HEAD"].id)
+ self.assertEqual(commit_sha, r[b"refs/heads/new_branch"].id)
+ self.assertEqual([], r[commit_sha].parents)
+ self.assertIn(b"refs/heads/new_branch", r)
+
+ new_branch_head = commit_sha
+
+ commit_sha = r.do_commit(
+ b"commit to branch 2",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12395,
+ commit_timezone=0,
+ author_timestamp=12395,
+ author_timezone=0,
+ ref=b"refs/heads/new_branch",
+ )
+ self.assertEqual(self._root_commit, r[b"HEAD"].id)
+ self.assertEqual(commit_sha, r[b"refs/heads/new_branch"].id)
+ self.assertEqual([new_branch_head], r[commit_sha].parents)
+
+ def test_commit_merge_heads(self):
+ r = self._repo
+ merge_1 = r.do_commit(
+ b"commit to branch 2",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12395,
+ commit_timezone=0,
+ author_timestamp=12395,
+ author_timezone=0,
+ ref=b"refs/heads/new_branch",
+ )
+ commit_sha = r.do_commit(
+ b"commit with merge",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12395,
+ commit_timezone=0,
+ author_timestamp=12395,
+ author_timezone=0,
+ merge_heads=[merge_1],
+ )
+ self.assertEqual([self._root_commit, merge_1], r[commit_sha].parents)
+
+ def test_commit_dangling_commit(self):
+ r = self._repo
+
+ old_shas = set(r.object_store)
+ old_refs = r.get_refs()
+ commit_sha = r.do_commit(
+ b"commit with no ref",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12395,
+ commit_timezone=0,
+ author_timestamp=12395,
+ author_timezone=0,
+ ref=None,
+ )
+ new_shas = set(r.object_store) - old_shas
+
+ # New sha is added, but no new refs
+ self.assertEqual(1, len(new_shas))
+ new_commit = r[new_shas.pop()]
+ self.assertEqual(r[self._root_commit].tree, new_commit.tree)
+ self.assertEqual([], r[commit_sha].parents)
+ self.assertEqual(old_refs, r.get_refs())
+
+ def test_commit_dangling_commit_with_parents(self):
+ r = self._repo
+
+ old_shas = set(r.object_store)
+ old_refs = r.get_refs()
+ commit_sha = r.do_commit(
+ b"commit with no ref",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12395,
+ commit_timezone=0,
+ author_timestamp=12395,
+ author_timezone=0,
+ ref=None,
+ merge_heads=[self._root_commit],
+ )
+ new_shas = set(r.object_store) - old_shas
+
+ # New sha is added, but no new refs
+ self.assertEqual(1, len(new_shas))
+ new_commit = r[new_shas.pop()]
+ self.assertEqual(r[self._root_commit].tree, new_commit.tree)
+ self.assertEqual([self._root_commit], r[commit_sha].parents)
+ self.assertEqual(old_refs, r.get_refs())
+
+ def test_stage_absolute(self):
+ r = self._repo
+ os.remove(os.path.join(r.path, "a"))
+ self.assertRaises(ValueError, r.stage, [os.path.join(r.path, "a")])
+
+ def test_stage_deleted(self):
+ r = self._repo
+ os.remove(os.path.join(r.path, "a"))
+ r.stage(["a"])
+ r.stage(["a"]) # double-stage a deleted path
+ self.assertEqual([], list(r.open_index()))
+
+ def test_stage_directory(self):
+ r = self._repo
+ os.mkdir(os.path.join(r.path, "c"))
+ r.stage(["c"])
+ self.assertEqual([b"a"], list(r.open_index()))
+
+ def test_stage_submodule(self):
+ r = self._repo
+ s = Repo.init(os.path.join(r.path, "sub"), mkdir=True)
+ s.do_commit(b"message")
+ r.stage(["sub"])
+ self.assertEqual([b"a", b"sub"], list(r.open_index()))
+
+ def test_unstage_midify_file_with_dir(self):
+ os.mkdir(os.path.join(self._repo.path, "new_dir"))
+ full_path = os.path.join(self._repo.path, "new_dir", "foo")
+
+ with open(full_path, "w") as f:
+ f.write("hello")
+ porcelain.add(self._repo, paths=[full_path])
+ porcelain.commit(
+ self._repo,
+ message=b"unitest",
+ committer=b"Jane <jane@example.com>",
+ author=b"John <john@example.com>",
+ )
+ with open(full_path, "a") as f:
+ f.write("something new")
+ self._repo.unstage(["new_dir/foo"])
+ status = list(porcelain.status(self._repo))
+ self.assertEqual(
+ [{"add": [], "delete": [], "modify": []}, [b"new_dir/foo"], []], status
+ )
+
+ def test_unstage_while_no_commit(self):
+ file = "foo"
+ full_path = os.path.join(self._repo.path, file)
+ with open(full_path, "w") as f:
+ f.write("hello")
+ porcelain.add(self._repo, paths=[full_path])
+ self._repo.unstage([file])
+ status = list(porcelain.status(self._repo))
+ self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["foo"]], status)
+
+ def test_unstage_add_file(self):
+ file = "foo"
+ full_path = os.path.join(self._repo.path, file)
+ porcelain.commit(
+ self._repo,
+ message=b"unitest",
+ committer=b"Jane <jane@example.com>",
+ author=b"John <john@example.com>",
+ )
+ with open(full_path, "w") as f:
+ f.write("hello")
+ porcelain.add(self._repo, paths=[full_path])
+ self._repo.unstage([file])
+ status = list(porcelain.status(self._repo))
+ self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["foo"]], status)
+
+ def test_unstage_modify_file(self):
+ file = "foo"
+ full_path = os.path.join(self._repo.path, file)
+ with open(full_path, "w") as f:
+ f.write("hello")
+ porcelain.add(self._repo, paths=[full_path])
+ porcelain.commit(
+ self._repo,
+ message=b"unitest",
+ committer=b"Jane <jane@example.com>",
+ author=b"John <john@example.com>",
+ )
+ with open(full_path, "a") as f:
+ f.write("broken")
+ porcelain.add(self._repo, paths=[full_path])
+ self._repo.unstage([file])
+ status = list(porcelain.status(self._repo))
+
+ self.assertEqual(
+ [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status
+ )
+
+ def test_unstage_remove_file(self):
+ file = "foo"
+ full_path = os.path.join(self._repo.path, file)
+ with open(full_path, "w") as f:
+ f.write("hello")
+ porcelain.add(self._repo, paths=[full_path])
+ porcelain.commit(
+ self._repo,
+ message=b"unitest",
+ committer=b"Jane <jane@example.com>",
+ author=b"John <john@example.com>",
+ )
+ os.remove(full_path)
+ self._repo.unstage([file])
+ status = list(porcelain.status(self._repo))
+ self.assertEqual(
+ [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status
+ )
+
+ def test_reset_index(self):
+ r = self._repo
+ with open(os.path.join(r.path, "a"), "wb") as f:
+ f.write(b"changed")
+ with open(os.path.join(r.path, "b"), "wb") as f:
+ f.write(b"added")
+ r.stage(["a", "b"])
+ status = list(porcelain.status(self._repo))
+ self.assertEqual(
+ [{"add": [b"b"], "delete": [], "modify": [b"a"]}, [], []], status
+ )
+ r.reset_index()
+ status = list(porcelain.status(self._repo))
+ self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["b"]], status)
+
+ @skipIf(
+ sys.platform in ("win32", "darwin"),
+ "tries to implicitly decode as utf8",
+ )
+ def test_commit_no_encode_decode(self):
+ r = self._repo
+ repo_path_bytes = os.fsencode(r.path)
+ encodings = ("utf8", "latin1")
+ names = ["À".encode(encoding) for encoding in encodings]
+ for name, encoding in zip(names, encodings):
+ full_path = os.path.join(repo_path_bytes, name)
+ with open(full_path, "wb") as f:
+ f.write(encoding.encode("ascii"))
+            # These files break tear_down_repo, so clean up these files
+            # ourselves.
+ self.addCleanup(os.remove, full_path)
+
+ r.stage(names)
+ commit_sha = r.do_commit(
+ b"Files with different encodings",
+ committer=b"Test Committer <test@nodomain.com>",
+ author=b"Test Author <test@nodomain.com>",
+ commit_timestamp=12395,
+ commit_timezone=0,
+ author_timestamp=12395,
+ author_timezone=0,
+ ref=None,
+ merge_heads=[self._root_commit],
+ )
+
+ for name, encoding in zip(names, encodings):
+ mode, id = tree_lookup_path(r.get_object, r[commit_sha].tree, name)
+ self.assertEqual(stat.S_IFREG | 0o644, mode)
+ self.assertEqual(encoding.encode("ascii"), r[id].data)
+
+ def test_discover_intended(self):
+ path = os.path.join(self._repo_dir, "b/c")
+ r = Repo.discover(path)
+ self.assertEqual(r.head(), self._repo.head())
+
+ def test_discover_isrepo(self):
+ r = Repo.discover(self._repo_dir)
+ self.assertEqual(r.head(), self._repo.head())
+
+ def test_discover_notrepo(self):
+ with self.assertRaises(NotGitRepository):
+ Repo.discover("/")
+
+
+class CheckUserIdentityTests(TestCase):
+ def test_valid(self):
+ check_user_identity(b"Me <me@example.com>")
+
+ def test_invalid(self):
+ self.assertRaises(InvalidUserIdentity, check_user_identity, b"No Email")
+ self.assertRaises(
+ InvalidUserIdentity, check_user_identity, b"Fullname <missing"
+ )
+ self.assertRaises(
+ InvalidUserIdentity, check_user_identity, b"Fullname missing>"
+ )
+ self.assertRaises(
+ InvalidUserIdentity, check_user_identity, b"Fullname >order<>"
+ )
+ self.assertRaises(
+ InvalidUserIdentity, check_user_identity, b"Contains\0null byte <>"
+ )
+ self.assertRaises(
+ InvalidUserIdentity, check_user_identity, b"Contains\nnewline byte <>"
+ )
blob - /dev/null
blob + 2fe005ccd8948db3b6714924a37e34384bbee91e (mode 644)
--- /dev/null
+++ tests/test_server.py
+# test_server.py -- Tests for the git server
+# Copyright (C) 2010 Google, Inc.
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for the smart protocol server."""
+
+import os
+import shutil
+import sys
+import tempfile
+from io import BytesIO
+from typing import Dict, List
+
+from dulwich.errors import (
+ GitProtocolError,
+ HangupException,
+ NotGitRepository,
+ UnexpectedCommandError,
+)
+from dulwich.object_store import MemoryObjectStore
+from dulwich.objects import Tree
+from dulwich.protocol import ZERO_SHA, format_capability_line
+from dulwich.repo import MemoryRepo, Repo
+from dulwich.server import (
+ Backend,
+ DictBackend,
+ FileSystemBackend,
+ MultiAckDetailedGraphWalkerImpl,
+ MultiAckGraphWalkerImpl,
+ PackHandler,
+ ReceivePackHandler,
+ SingleAckGraphWalkerImpl,
+ UploadPackHandler,
+ _find_shallow,
+ _ProtocolGraphWalker,
+ _split_proto_line,
+ serve_command,
+ update_server_info,
+)
+
+from . import TestCase
+from .utils import make_commit, make_tag
+
+ONE = b"1" * 40
+TWO = b"2" * 40
+THREE = b"3" * 40
+FOUR = b"4" * 40
+FIVE = b"5" * 40
+SIX = b"6" * 40
+
+
+class TestProto:
+ def __init__(self) -> None:
+ self._output: List[bytes] = []
+ self._received: Dict[int, List[bytes]] = {0: [], 1: [], 2: [], 3: []}
+
+ def set_output(self, output_lines):
+ self._output = output_lines
+
+ def read_pkt_line(self):
+ if self._output:
+ data = self._output.pop(0)
+ if data is not None:
+ return data.rstrip() + b"\n"
+ else:
+ # flush-pkt ('0000').
+ return None
+ else:
+ raise HangupException
+
+ def write_sideband(self, band, data):
+ self._received[band].append(data)
+
+ def write_pkt_line(self, data):
+ self._received[0].append(data)
+
+ def get_received_line(self, band=0):
+ lines = self._received[band]
+ return lines.pop(0)
+
+
+class TestGenericPackHandler(PackHandler):
+ def __init__(self) -> None:
+ PackHandler.__init__(self, Backend(), None)
+
+ @classmethod
+ def capabilities(cls):
+ return [b"cap1", b"cap2", b"cap3"]
+
+ @classmethod
+ def required_capabilities(cls):
+ return [b"cap2"]
+
+
+class HandlerTestCase(TestCase):
+ def setUp(self):
+ super().setUp()
+ self._handler = TestGenericPackHandler()
+
+ def assertSucceeds(self, func, *args, **kwargs):
+ try:
+ func(*args, **kwargs)
+ except GitProtocolError as e:
+ self.fail(e)
+
+ def test_capability_line(self):
+ self.assertEqual(
+ b" cap1 cap2 cap3",
+ format_capability_line([b"cap1", b"cap2", b"cap3"]),
+ )
+
+ def test_set_client_capabilities(self):
+ set_caps = self._handler.set_client_capabilities
+ self.assertSucceeds(set_caps, [b"cap2"])
+ self.assertSucceeds(set_caps, [b"cap1", b"cap2"])
+
+ # different order
+ self.assertSucceeds(set_caps, [b"cap3", b"cap1", b"cap2"])
+
+ # error cases
+ self.assertRaises(GitProtocolError, set_caps, [b"capxxx", b"cap2"])
+ self.assertRaises(GitProtocolError, set_caps, [b"cap1", b"cap3"])
+
+ # ignore innocuous but unknown capabilities
+ self.assertRaises(GitProtocolError, set_caps, [b"cap2", b"ignoreme"])
+ self.assertNotIn(b"ignoreme", self._handler.capabilities())
+ self._handler.innocuous_capabilities = lambda: (b"ignoreme",)
+ self.assertSucceeds(set_caps, [b"cap2", b"ignoreme"])
+
+ def test_has_capability(self):
+ self.assertRaises(GitProtocolError, self._handler.has_capability, b"cap")
+ caps = self._handler.capabilities()
+ self._handler.set_client_capabilities(caps)
+ for cap in caps:
+ self.assertTrue(self._handler.has_capability(cap))
+ self.assertFalse(self._handler.has_capability(b"capxxx"))
+
+
+class UploadPackHandlerTestCase(TestCase):
+ def setUp(self):
+ super().setUp()
+ self.path = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, self.path)
+ self.repo = Repo.init(self.path)
+ self._repo = Repo.init_bare(self.path)
+ backend = DictBackend({b"/": self._repo})
+ self._handler = UploadPackHandler(
+ backend, [b"/", b"host=lolcathost"], TestProto()
+ )
+
+ def test_progress(self):
+ caps = self._handler.required_capabilities()
+ self._handler.set_client_capabilities(caps)
+ self._handler._start_pack_send_phase()
+ self._handler.progress(b"first message")
+ self._handler.progress(b"second message")
+ self.assertEqual(b"first message", self._handler.proto.get_received_line(2))
+ self.assertEqual(b"second message", self._handler.proto.get_received_line(2))
+ self.assertRaises(IndexError, self._handler.proto.get_received_line, 2)
+
+ def test_no_progress(self):
+ caps = [*list(self._handler.required_capabilities()), b"no-progress"]
+ self._handler.set_client_capabilities(caps)
+ self._handler.progress(b"first message")
+ self._handler.progress(b"second message")
+ self.assertRaises(IndexError, self._handler.proto.get_received_line, 2)
+
+ def test_get_tagged(self):
+ refs = {
+ b"refs/tags/tag1": ONE,
+ b"refs/tags/tag2": TWO,
+ b"refs/heads/master": FOUR, # not a tag, no peeled value
+ }
+ # repo needs to peel this object
+ self._repo.object_store.add_object(make_commit(id=FOUR))
+ for name, sha in refs.items():
+ self._repo.refs[name] = sha
+ peeled = {
+ b"refs/tags/tag1": b"1234" * 10,
+ b"refs/tags/tag2": b"5678" * 10,
+ }
+ self._repo.refs._peeled_refs = peeled
+ self._repo.refs.add_packed_refs(refs)
+
+ caps = [*list(self._handler.required_capabilities()), b"include-tag"]
+ self._handler.set_client_capabilities(caps)
+ self.assertEqual(
+ {b"1234" * 10: ONE, b"5678" * 10: TWO},
+ self._handler.get_tagged(refs, repo=self._repo),
+ )
+
+ # non-include-tag case
+ caps = self._handler.required_capabilities()
+ self._handler.set_client_capabilities(caps)
+ self.assertEqual({}, self._handler.get_tagged(refs, repo=self._repo))
+
+ def test_nothing_to_do_but_wants(self):
+ # Just the fact that the client claims to want an object is enough
+ # for sending a pack. Even if there turns out to be nothing.
+ refs = {b"refs/tags/tag1": ONE}
+ tree = Tree()
+ self._repo.object_store.add_object(tree)
+ self._repo.object_store.add_object(make_commit(id=ONE, tree=tree))
+ for name, sha in refs.items():
+ self._repo.refs[name] = sha
+ self._handler.proto.set_output(
+ [
+ b"want " + ONE + b" side-band-64k thin-pack ofs-delta",
+ None,
+ b"have " + ONE,
+ b"done",
+ None,
+ ]
+ )
+ self._handler.handle()
+ # The server should always send a pack, even if it's empty.
+ self.assertTrue(self._handler.proto.get_received_line(1).startswith(b"PACK"))
+
+ def test_nothing_to_do_no_wants(self):
+ # Don't send a pack if the client didn't ask for anything.
+ refs = {b"refs/tags/tag1": ONE}
+ tree = Tree()
+ self._repo.object_store.add_object(tree)
+ self._repo.object_store.add_object(make_commit(id=ONE, tree=tree))
+ for ref, sha in refs.items():
+ self._repo.refs[ref] = sha
+ self._handler.proto.set_output([None])
+ self._handler.handle()
+ # The server should not send a pack, since the client didn't ask for
+ # anything.
+ self.assertEqual([], self._handler.proto._received[1])
+
+
+class FindShallowTests(TestCase):
+ def setUp(self):
+ super().setUp()
+ self._store = MemoryObjectStore()
+
+ def make_commit(self, **attrs):
+ commit = make_commit(**attrs)
+ self._store.add_object(commit)
+ return commit
+
+ def make_linear_commits(self, n, message=b""):
+ commits = []
+ parents = []
+ for _ in range(n):
+ commits.append(self.make_commit(parents=parents, message=message))
+ parents = [commits[-1].id]
+ return commits
+
+ def assertSameElements(self, expected, actual):
+ self.assertEqual(set(expected), set(actual))
+
+ def test_linear(self):
+ c1, c2, c3 = self.make_linear_commits(3)
+
+ self.assertEqual(({c3.id}, set()), _find_shallow(self._store, [c3.id], 1))
+ self.assertEqual(
+ ({c2.id}, {c3.id}),
+ _find_shallow(self._store, [c3.id], 2),
+ )
+ self.assertEqual(
+ ({c1.id}, {c2.id, c3.id}),
+ _find_shallow(self._store, [c3.id], 3),
+ )
+ self.assertEqual(
+ (set(), {c1.id, c2.id, c3.id}),
+ _find_shallow(self._store, [c3.id], 4),
+ )
+
+ def test_multiple_independent(self):
+ a = self.make_linear_commits(2, message=b"a")
+ b = self.make_linear_commits(2, message=b"b")
+ c = self.make_linear_commits(2, message=b"c")
+ heads = [a[1].id, b[1].id, c[1].id]
+
+ self.assertEqual(
+ ({a[0].id, b[0].id, c[0].id}, set(heads)),
+ _find_shallow(self._store, heads, 2),
+ )
+
+ def test_multiple_overlapping(self):
+ # Create the following commit tree:
+ # 1--2
+ # \
+ # 3--4
+ c1, c2 = self.make_linear_commits(2)
+ c3 = self.make_commit(parents=[c1.id])
+ c4 = self.make_commit(parents=[c3.id])
+
+ # 1 is shallow along the path from 4, but not along the path from 2.
+ self.assertEqual(
+ ({c1.id}, {c1.id, c2.id, c3.id, c4.id}),
+ _find_shallow(self._store, [c2.id, c4.id], 3),
+ )
+
+ def test_merge(self):
+ c1 = self.make_commit()
+ c2 = self.make_commit()
+ c3 = self.make_commit(parents=[c1.id, c2.id])
+
+ self.assertEqual(
+ ({c1.id, c2.id}, {c3.id}),
+ _find_shallow(self._store, [c3.id], 2),
+ )
+
+ def test_tag(self):
+ c1, c2 = self.make_linear_commits(2)
+ tag = make_tag(c2, name=b"tag")
+ self._store.add_object(tag)
+
+ self.assertEqual(
+ ({c1.id}, {c2.id}),
+ _find_shallow(self._store, [tag.id], 2),
+ )
+
+
+class TestUploadPackHandler(UploadPackHandler):
+ @classmethod
+ def required_capabilities(self):
+ return []
+
+
+class ReceivePackHandlerTestCase(TestCase):
+ def setUp(self):
+ super().setUp()
+ self._repo = MemoryRepo.init_bare([], {})
+ backend = DictBackend({b"/": self._repo})
+ self._handler = ReceivePackHandler(
+ backend, [b"/", b"host=lolcathost"], TestProto()
+ )
+
+ def test_apply_pack_del_ref(self):
+ refs = {b"refs/heads/master": TWO, b"refs/heads/fake-branch": ONE}
+ self._repo.refs._update(refs)
+ update_refs = [
+ [ONE, ZERO_SHA, b"refs/heads/fake-branch"],
+ ]
+ self._handler.set_client_capabilities([b"delete-refs"])
+ status = self._handler._apply_pack(update_refs)
+ self.assertEqual(status[0][0], b"unpack")
+ self.assertEqual(status[0][1], b"ok")
+ self.assertEqual(status[1][0], b"refs/heads/fake-branch")
+ self.assertEqual(status[1][1], b"ok")
+
+
+class ProtocolGraphWalkerEmptyTestCase(TestCase):
+ def setUp(self):
+ super().setUp()
+ self._repo = MemoryRepo.init_bare([], {})
+ backend = DictBackend({b"/": self._repo})
+ self._walker = _ProtocolGraphWalker(
+ TestUploadPackHandler(backend, [b"/", b"host=lolcats"], TestProto()),
+ self._repo.object_store,
+ self._repo.get_peeled,
+ self._repo.refs.get_symrefs,
+ )
+
+ def test_empty_repository(self):
+ # The server should wait for a flush packet.
+ self._walker.proto.set_output([])
+ self.assertRaises(HangupException, self._walker.determine_wants, {})
+ self.assertEqual(None, self._walker.proto.get_received_line())
+
+ self._walker.proto.set_output([None])
+ self.assertEqual([], self._walker.determine_wants({}))
+ self.assertEqual(None, self._walker.proto.get_received_line())
+
+
+class ProtocolGraphWalkerTestCase(TestCase):
+ def setUp(self):
+ super().setUp()
+ # Create the following commit tree:
+ # 3---5
+ # /
+ # 1---2---4
+ commits = [
+ make_commit(id=ONE, parents=[], commit_time=111),
+ make_commit(id=TWO, parents=[ONE], commit_time=222),
+ make_commit(id=THREE, parents=[ONE], commit_time=333),
+ make_commit(id=FOUR, parents=[TWO], commit_time=444),
+ make_commit(id=FIVE, parents=[THREE], commit_time=555),
+ ]
+ self._repo = MemoryRepo.init_bare(commits, {})
+ backend = DictBackend({b"/": self._repo})
+ self._walker = _ProtocolGraphWalker(
+ TestUploadPackHandler(backend, [b"/", b"host=lolcats"], TestProto()),
+ self._repo.object_store,
+ self._repo.get_peeled,
+ self._repo.refs.get_symrefs,
+ )
+
+ def test_all_wants_satisfied_no_haves(self):
+ self._walker.set_wants([ONE])
+ self.assertFalse(self._walker.all_wants_satisfied([]))
+ self._walker.set_wants([TWO])
+ self.assertFalse(self._walker.all_wants_satisfied([]))
+ self._walker.set_wants([THREE])
+ self.assertFalse(self._walker.all_wants_satisfied([]))
+
+ def test_all_wants_satisfied_have_root(self):
+ self._walker.set_wants([ONE])
+ self.assertTrue(self._walker.all_wants_satisfied([ONE]))
+ self._walker.set_wants([TWO])
+ self.assertTrue(self._walker.all_wants_satisfied([ONE]))
+ self._walker.set_wants([THREE])
+ self.assertTrue(self._walker.all_wants_satisfied([ONE]))
+
+ def test_all_wants_satisfied_have_branch(self):
+ self._walker.set_wants([TWO])
+ self.assertTrue(self._walker.all_wants_satisfied([TWO]))
+ # wrong branch
+ self._walker.set_wants([THREE])
+ self.assertFalse(self._walker.all_wants_satisfied([TWO]))
+
+ def test_all_wants_satisfied(self):
+ self._walker.set_wants([FOUR, FIVE])
+ # trivial case: wants == haves
+ self.assertTrue(self._walker.all_wants_satisfied([FOUR, FIVE]))
+ # cases that require walking the commit tree
+ self.assertTrue(self._walker.all_wants_satisfied([ONE]))
+ self.assertFalse(self._walker.all_wants_satisfied([TWO]))
+ self.assertFalse(self._walker.all_wants_satisfied([THREE]))
+ self.assertTrue(self._walker.all_wants_satisfied([TWO, THREE]))
+
+ def test_split_proto_line(self):
+ allowed = (b"want", b"done", None)
+ self.assertEqual(
+ (b"want", ONE), _split_proto_line(b"want " + ONE + b"\n", allowed)
+ )
+ self.assertEqual(
+ (b"want", TWO), _split_proto_line(b"want " + TWO + b"\n", allowed)
+ )
+ self.assertRaises(GitProtocolError, _split_proto_line, b"want xxxx\n", allowed)
+ self.assertRaises(
+ UnexpectedCommandError,
+ _split_proto_line,
+ b"have " + THREE + b"\n",
+ allowed,
+ )
+ self.assertRaises(
+ GitProtocolError,
+ _split_proto_line,
+ b"foo " + FOUR + b"\n",
+ allowed,
+ )
+ self.assertRaises(GitProtocolError, _split_proto_line, b"bar", allowed)
+ self.assertEqual((b"done", None), _split_proto_line(b"done\n", allowed))
+ self.assertEqual((None, None), _split_proto_line(b"", allowed))
+
+ def test_determine_wants(self):
+ self._walker.proto.set_output([None])
+ self.assertEqual([], self._walker.determine_wants({}))
+ self.assertEqual(None, self._walker.proto.get_received_line())
+
+ self._walker.proto.set_output(
+ [
+ b"want " + ONE + b" multi_ack",
+ b"want " + TWO,
+ None,
+ ]
+ )
+ heads = {
+ b"refs/heads/ref1": ONE,
+ b"refs/heads/ref2": TWO,
+ b"refs/heads/ref3": THREE,
+ }
+ self._repo.refs._update(heads)
+ self.assertEqual([ONE, TWO], self._walker.determine_wants(heads))
+
+ self._walker.advertise_refs = True
+ self.assertEqual([], self._walker.determine_wants(heads))
+ self._walker.advertise_refs = False
+
+ self._walker.proto.set_output([b"want " + FOUR + b" multi_ack", None])
+ self.assertRaises(GitProtocolError, self._walker.determine_wants, heads)
+
+ self._walker.proto.set_output([None])
+ self.assertEqual([], self._walker.determine_wants(heads))
+
+ self._walker.proto.set_output([b"want " + ONE + b" multi_ack", b"foo", None])
+ self.assertRaises(GitProtocolError, self._walker.determine_wants, heads)
+
+ self._walker.proto.set_output([b"want " + FOUR + b" multi_ack", None])
+ self.assertRaises(GitProtocolError, self._walker.determine_wants, heads)
+
+ def test_determine_wants_advertisement(self):
+ self._walker.proto.set_output([None])
+ # advertise branch tips plus tag
+ heads = {
+ b"refs/heads/ref4": FOUR,
+ b"refs/heads/ref5": FIVE,
+ b"refs/heads/tag6": SIX,
+ }
+ self._repo.refs._update(heads)
+ self._repo.refs._update_peeled(heads)
+ self._repo.refs._update_peeled({b"refs/heads/tag6": FIVE})
+ self._walker.determine_wants(heads)
+ lines = []
+ while True:
+ line = self._walker.proto.get_received_line()
+ if line is None:
+ break
+ # strip capabilities list if present
+ if b"\x00" in line:
+ line = line[: line.index(b"\x00")]
+ lines.append(line.rstrip())
+
+ self.assertEqual(
+ [
+ FOUR + b" refs/heads/ref4",
+ FIVE + b" refs/heads/ref5",
+ FIVE + b" refs/heads/tag6^{}",
+ SIX + b" refs/heads/tag6",
+ ],
+ sorted(lines),
+ )
+
+ # ensure peeled tag was advertised immediately following tag
+ for i, line in enumerate(lines):
+ if line.endswith(b" refs/heads/tag6"):
+ self.assertEqual(FIVE + b" refs/heads/tag6^{}", lines[i + 1])
+
+ # TODO: test commit time cutoff
+
+ def _handle_shallow_request(self, lines, heads):
+ self._walker.proto.set_output([*lines, None])
+ self._walker._handle_shallow_request(heads)
+
+ def assertReceived(self, expected):
+ self.assertEqual(
+ expected, list(iter(self._walker.proto.get_received_line, None))
+ )
+
+ def test_handle_shallow_request_no_client_shallows(self):
+ self._handle_shallow_request([b"deepen 2\n"], [FOUR, FIVE])
+ self.assertEqual({TWO, THREE}, self._walker.shallow)
+ self.assertReceived(
+ [
+ b"shallow " + TWO,
+ b"shallow " + THREE,
+ ]
+ )
+
+ def test_handle_shallow_request_no_new_shallows(self):
+ lines = [
+ b"shallow " + TWO + b"\n",
+ b"shallow " + THREE + b"\n",
+ b"deepen 2\n",
+ ]
+ self._handle_shallow_request(lines, [FOUR, FIVE])
+ self.assertEqual({TWO, THREE}, self._walker.shallow)
+ self.assertReceived([])
+
+ def test_handle_shallow_request_unshallows(self):
+ lines = [
+ b"shallow " + TWO + b"\n",
+ b"deepen 3\n",
+ ]
+ self._handle_shallow_request(lines, [FOUR, FIVE])
+ self.assertEqual({ONE}, self._walker.shallow)
+ self.assertReceived(
+ [
+ b"shallow " + ONE,
+ b"unshallow " + TWO,
+                # THREE is unshallow but was not shallow in the client
+ ]
+ )
+
+
+class TestProtocolGraphWalker:
+ def __init__(self) -> None:
+ self.acks: List[bytes] = []
+ self.lines: List[bytes] = []
+ self.wants_satisified = False
+ self.stateless_rpc = None
+ self.advertise_refs = False
+ self._impl = None
+ self.done_required = True
+ self.done_received = False
+ self._empty = False
+ self.pack_sent = False
+
+ def read_proto_line(self, allowed):
+ command, sha = self.lines.pop(0)
+ if allowed is not None:
+ assert command in allowed
+ return command, sha
+
+ def send_ack(self, sha, ack_type=b""):
+ self.acks.append((sha, ack_type))
+
+ def send_nak(self):
+ self.acks.append((None, b"nak"))
+
+ def all_wants_satisfied(self, haves):
+ if haves:
+ return self.wants_satisified
+
+ def pop_ack(self):
+ if not self.acks:
+ return None
+ return self.acks.pop(0)
+
+ def handle_done(self):
+ if not self._impl:
+ return
+ # Whether or not PACK is sent after is determined by this, so
+ # record this value.
+ self.pack_sent = self._impl.handle_done(self.done_required, self.done_received)
+ return self.pack_sent
+
+ def notify_done(self):
+ self.done_received = True
+
+
+class AckGraphWalkerImplTestCase(TestCase):
+ """Base setup and asserts for AckGraphWalker tests."""
+
+ def setUp(self):
+ super().setUp()
+ self._walker = TestProtocolGraphWalker()
+ self._walker.lines = [
+ (b"have", TWO),
+ (b"have", ONE),
+ (b"have", THREE),
+ (b"done", None),
+ ]
+ self._impl = self.impl_cls(self._walker)
+ self._walker._impl = self._impl
+
+ def assertNoAck(self):
+ self.assertEqual(None, self._walker.pop_ack())
+
+ def assertAcks(self, acks):
+ for sha, ack_type in acks:
+ self.assertEqual((sha, ack_type), self._walker.pop_ack())
+ self.assertNoAck()
+
+ def assertAck(self, sha, ack_type=b""):
+ self.assertAcks([(sha, ack_type)])
+
+ def assertNak(self):
+ self.assertAck(None, b"nak")
+
+ def assertNextEquals(self, sha):
+ self.assertEqual(sha, next(self._impl))
+
+ def assertNextEmpty(self):
+        # This is necessary because of no-done - the assumption that it
+        # is safe to immediately send out the final ACK is no longer
+ # true but the test is still needed for it. TestProtocolWalker
+ # does implement the handle_done which will determine whether
+ # the final confirmation can be sent.
+ self.assertRaises(IndexError, next, self._impl)
+ self._walker.handle_done()
+
+
+class SingleAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
+ impl_cls = SingleAckGraphWalkerImpl
+
+ def test_single_ack(self):
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self._impl.ack(ONE)
+ self.assertAck(ONE)
+
+ self.assertNextEquals(THREE)
+ self._impl.ack(THREE)
+ self.assertNoAck()
+
+ self.assertNextEquals(None)
+ self.assertNoAck()
+
+ def test_single_ack_flush(self):
+ # same as ack test but ends with a flush-pkt instead of done
+ self._walker.lines[-1] = (None, None)
+
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self._impl.ack(ONE)
+ self.assertAck(ONE)
+
+ self.assertNextEquals(THREE)
+ self.assertNoAck()
+
+ self.assertNextEquals(None)
+ self.assertNoAck()
+
+ def test_single_ack_nak(self):
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self.assertNoAck()
+
+ self.assertNextEquals(THREE)
+ self.assertNoAck()
+
+ self.assertNextEquals(None)
+ self.assertNextEmpty()
+ self.assertNak()
+
+ def test_single_ack_nak_flush(self):
+ # same as nak test but ends with a flush-pkt instead of done
+ self._walker.lines[-1] = (None, None)
+
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self.assertNoAck()
+
+ self.assertNextEquals(THREE)
+ self.assertNoAck()
+
+ self.assertNextEquals(None)
+ self.assertNextEmpty()
+ self.assertNak()
+
+
+class MultiAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
+ impl_cls = MultiAckGraphWalkerImpl
+
+ def test_multi_ack(self):
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self._impl.ack(ONE)
+ self.assertAck(ONE, b"continue")
+
+ self.assertNextEquals(THREE)
+ self._impl.ack(THREE)
+ self.assertAck(THREE, b"continue")
+
+ self.assertNextEquals(None)
+ self.assertNextEmpty()
+ self.assertAck(THREE)
+
+ def test_multi_ack_partial(self):
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self._impl.ack(ONE)
+ self.assertAck(ONE, b"continue")
+
+ self.assertNextEquals(THREE)
+ self.assertNoAck()
+
+ self.assertNextEquals(None)
+ self.assertNextEmpty()
+ self.assertAck(ONE)
+
+ def test_multi_ack_flush(self):
+ self._walker.lines = [
+ (b"have", TWO),
+ (None, None),
+ (b"have", ONE),
+ (b"have", THREE),
+ (b"done", None),
+ ]
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self.assertNak() # nak the flush-pkt
+
+ self._impl.ack(ONE)
+ self.assertAck(ONE, b"continue")
+
+ self.assertNextEquals(THREE)
+ self._impl.ack(THREE)
+ self.assertAck(THREE, b"continue")
+
+ self.assertNextEquals(None)
+ self.assertNextEmpty()
+ self.assertAck(THREE)
+
+ def test_multi_ack_nak(self):
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self.assertNoAck()
+
+ self.assertNextEquals(THREE)
+ self.assertNoAck()
+
+ self.assertNextEquals(None)
+ self.assertNextEmpty()
+ self.assertNak()
+
+
+class MultiAckDetailedGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
+ impl_cls = MultiAckDetailedGraphWalkerImpl
+
+ def test_multi_ack(self):
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self._impl.ack(ONE)
+ self.assertAck(ONE, b"common")
+
+ self.assertNextEquals(THREE)
+ self._impl.ack(THREE)
+ self.assertAck(THREE, b"common")
+
+ # done is read.
+ self._walker.wants_satisified = True
+ self.assertNextEquals(None)
+ self._walker.lines.append((None, None))
+ self.assertNextEmpty()
+ self.assertAcks([(THREE, b"ready"), (None, b"nak"), (THREE, b"")])
+ # PACK is sent
+ self.assertTrue(self._walker.pack_sent)
+
+ def test_multi_ack_nodone(self):
+ self._walker.done_required = False
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self._impl.ack(ONE)
+ self.assertAck(ONE, b"common")
+
+ self.assertNextEquals(THREE)
+ self._impl.ack(THREE)
+ self.assertAck(THREE, b"common")
+
+ # done is read.
+ self._walker.wants_satisified = True
+ self.assertNextEquals(None)
+ self._walker.lines.append((None, None))
+ self.assertNextEmpty()
+ self.assertAcks([(THREE, b"ready"), (None, b"nak"), (THREE, b"")])
+ # PACK is sent
+ self.assertTrue(self._walker.pack_sent)
+
+ def test_multi_ack_flush_end(self):
+ # transmission ends with a flush-pkt without a done but no-done is
+ # assumed.
+ self._walker.lines[-1] = (None, None)
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self._impl.ack(ONE)
+ self.assertAck(ONE, b"common")
+
+ self.assertNextEquals(THREE)
+ self._impl.ack(THREE)
+ self.assertAck(THREE, b"common")
+
+ # no done is read
+ self._walker.wants_satisified = True
+ self.assertNextEmpty()
+ self.assertAcks([(THREE, b"ready"), (None, b"nak")])
+ # PACK is NOT sent
+ self.assertFalse(self._walker.pack_sent)
+
+ def test_multi_ack_flush_end_nodone(self):
+ # transmission ends with a flush-pkt without a done but no-done is
+ # assumed.
+ self._walker.lines[-1] = (None, None)
+ self._walker.done_required = False
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self._impl.ack(ONE)
+ self.assertAck(ONE, b"common")
+
+ self.assertNextEquals(THREE)
+ self._impl.ack(THREE)
+ self.assertAck(THREE, b"common")
+
+        # no done is read, but pretend it is (the final "ACK <commit_id>" is sent)
+ self._walker.wants_satisified = True
+ self.assertNextEmpty()
+ self.assertAcks([(THREE, b"ready"), (None, b"nak"), (THREE, b"")])
+ # PACK is sent
+ self.assertTrue(self._walker.pack_sent)
+
+ def test_multi_ack_partial(self):
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self._impl.ack(ONE)
+ self.assertAck(ONE, b"common")
+
+ self.assertNextEquals(THREE)
+ self.assertNoAck()
+
+ self.assertNextEquals(None)
+ self.assertNextEmpty()
+ self.assertAck(ONE)
+
+ def test_multi_ack_flush(self):
+ # same as ack test but contains a flush-pkt in the middle
+ self._walker.lines = [
+ (b"have", TWO),
+ (None, None),
+ (b"have", ONE),
+ (b"have", THREE),
+ (b"done", None),
+ (None, None),
+ ]
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self.assertNak() # nak the flush-pkt
+
+ self._impl.ack(ONE)
+ self.assertAck(ONE, b"common")
+
+ self.assertNextEquals(THREE)
+ self._impl.ack(THREE)
+ self.assertAck(THREE, b"common")
+
+ self._walker.wants_satisified = True
+ self.assertNextEquals(None)
+ self.assertNextEmpty()
+ self.assertAcks([(THREE, b"ready"), (None, b"nak"), (THREE, b"")])
+
+ def test_multi_ack_nak(self):
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self.assertNoAck()
+
+ self.assertNextEquals(THREE)
+ self.assertNoAck()
+
+ # Done is sent here.
+ self.assertNextEquals(None)
+ self.assertNextEmpty()
+ self.assertNak()
+ self.assertNextEmpty()
+ self.assertTrue(self._walker.pack_sent)
+
+ def test_multi_ack_nak_nodone(self):
+ self._walker.done_required = False
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self.assertNoAck()
+
+ self.assertNextEquals(THREE)
+ self.assertNoAck()
+
+ # Done is sent here.
+ self.assertFalse(self._walker.pack_sent)
+ self.assertNextEquals(None)
+ self.assertNextEmpty()
+ self.assertTrue(self._walker.pack_sent)
+ self.assertNak()
+ self.assertNextEmpty()
+
+ def test_multi_ack_nak_flush(self):
+ # same as nak test but contains a flush-pkt in the middle
+ self._walker.lines = [
+ (b"have", TWO),
+ (None, None),
+ (b"have", ONE),
+ (b"have", THREE),
+ (b"done", None),
+ ]
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self.assertNak()
+
+ self.assertNextEquals(THREE)
+ self.assertNoAck()
+
+ self.assertNextEquals(None)
+ self.assertNextEmpty()
+ self.assertNak()
+
+ def test_multi_ack_stateless(self):
+ # transmission ends with a flush-pkt
+ self._walker.lines[-1] = (None, None)
+ self._walker.stateless_rpc = True
+
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self.assertNoAck()
+
+ self.assertNextEquals(THREE)
+ self.assertNoAck()
+
+ self.assertFalse(self._walker.pack_sent)
+ self.assertNextEquals(None)
+ self.assertNak()
+
+ self.assertNextEmpty()
+ self.assertNoAck()
+ self.assertFalse(self._walker.pack_sent)
+
+ def test_multi_ack_stateless_nodone(self):
+ self._walker.done_required = False
+ # transmission ends with a flush-pkt
+ self._walker.lines[-1] = (None, None)
+ self._walker.stateless_rpc = True
+
+ self.assertNextEquals(TWO)
+ self.assertNoAck()
+
+ self.assertNextEquals(ONE)
+ self.assertNoAck()
+
+ self.assertNextEquals(THREE)
+ self.assertNoAck()
+
+ self.assertFalse(self._walker.pack_sent)
+ self.assertNextEquals(None)
+ self.assertNak()
+
+ self.assertNextEmpty()
+ self.assertNoAck()
+ # PACK will still not be sent.
+ self.assertFalse(self._walker.pack_sent)
+
+
+class FileSystemBackendTests(TestCase):
+ """Tests for FileSystemBackend."""
+
+ def setUp(self):
+ super().setUp()
+ self.path = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, self.path)
+ self.repo = Repo.init(self.path)
+ if sys.platform == "win32":
+ self.backend = FileSystemBackend(self.path[0] + ":" + os.sep)
+ else:
+ self.backend = FileSystemBackend()
+
+ def test_nonexistant(self):
+ self.assertRaises(
+ NotGitRepository,
+ self.backend.open_repository,
+ "/does/not/exist/unless/foo",
+ )
+
+ def test_absolute(self):
+ repo = self.backend.open_repository(self.path)
+ self.assertTrue(
+ os.path.samefile(
+ os.path.abspath(repo.path), os.path.abspath(self.repo.path)
+ )
+ )
+
+ def test_child(self):
+ self.assertRaises(
+ NotGitRepository,
+ self.backend.open_repository,
+ os.path.join(self.path, "foo"),
+ )
+
+ def test_bad_repo_path(self):
+ backend = FileSystemBackend()
+
+ self.assertRaises(NotGitRepository, lambda: backend.open_repository("/ups"))
+
+
+class DictBackendTests(TestCase):
+ """Tests for DictBackend."""
+
+ def test_nonexistant(self):
+ repo = MemoryRepo.init_bare([], {})
+ backend = DictBackend({b"/": repo})
+ self.assertRaises(
+ NotGitRepository,
+ backend.open_repository,
+ "/does/not/exist/unless/foo",
+ )
+
+ def test_bad_repo_path(self):
+ repo = MemoryRepo.init_bare([], {})
+ backend = DictBackend({b"/": repo})
+
+ self.assertRaises(NotGitRepository, lambda: backend.open_repository("/ups"))
+
+
+class ServeCommandTests(TestCase):
+ """Tests for serve_command."""
+
+ def setUp(self):
+ super().setUp()
+ self.backend = DictBackend({})
+
+ def serve_command(self, handler_cls, args, inf, outf):
+ return serve_command(
+ handler_cls,
+ [b"test", *args],
+ backend=self.backend,
+ inf=inf,
+ outf=outf,
+ )
+
+ def test_receive_pack(self):
+ commit = make_commit(id=ONE, parents=[], commit_time=111)
+ self.backend.repos[b"/"] = MemoryRepo.init_bare(
+ [commit], {b"refs/heads/master": commit.id}
+ )
+ outf = BytesIO()
+ exitcode = self.serve_command(
+ ReceivePackHandler, [b"/"], BytesIO(b"0000"), outf
+ )
+ outlines = outf.getvalue().splitlines()
+ self.assertEqual(2, len(outlines))
+ self.assertEqual(
+ b"1111111111111111111111111111111111111111 refs/heads/master",
+ outlines[0][4:].split(b"\x00")[0],
+ )
+ self.assertEqual(b"0000", outlines[-1])
+ self.assertEqual(0, exitcode)
+
+
+class UpdateServerInfoTests(TestCase):
+ """Tests for update_server_info."""
+
+ def setUp(self):
+ super().setUp()
+ self.path = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, self.path)
+ self.repo = Repo.init(self.path)
+
+ def test_empty(self):
+ update_server_info(self.repo)
+ with open(os.path.join(self.path, ".git", "info", "refs"), "rb") as f:
+ self.assertEqual(b"", f.read())
+ p = os.path.join(self.path, ".git", "objects", "info", "packs")
+ with open(p, "rb") as f:
+ self.assertEqual(b"", f.read())
+
+ def test_simple(self):
+ commit_id = self.repo.do_commit(
+ message=b"foo",
+ committer=b"Joe Example <joe@example.com>",
+ ref=b"refs/heads/foo",
+ )
+ update_server_info(self.repo)
+ with open(os.path.join(self.path, ".git", "info", "refs"), "rb") as f:
+ self.assertEqual(f.read(), commit_id + b"\trefs/heads/foo\n")
+ p = os.path.join(self.path, ".git", "objects", "info", "packs")
+ with open(p, "rb") as f:
+ self.assertEqual(f.read(), b"")
blob - /dev/null
blob + 2bd3b7c4ae40b4fd97ea15ce9dc40057a87b8244 (mode 644)
--- /dev/null
+++ tests/test_stash.py
+# test_stash.py -- tests for stash
+# Copyright (C) 2018 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for stashes."""
+
+from dulwich.repo import MemoryRepo
+from dulwich.stash import Stash
+
+from . import TestCase
+
+
+class StashTests(TestCase):
+ """Tests for stash."""
+
+ def test_obtain(self):
+ repo = MemoryRepo()
+ stash = Stash.from_repo(repo)
+ self.assertIsInstance(stash, Stash)
blob - /dev/null
blob + e1814e94f13685e284387f640e55734c82e3c2c0 (mode 644)
--- /dev/null
+++ tests/test_utils.py
+# test_utils.py -- Tests for git test utilities.
+# Copyright (C) 2010 Google, Inc.
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for git test utilities."""
+
+from dulwich.object_store import MemoryObjectStore
+from dulwich.objects import Blob
+
+from . import TestCase
+from .utils import build_commit_graph, make_object
+
+
+class BuildCommitGraphTest(TestCase):
+ def setUp(self):
+ super().setUp()
+ self.store = MemoryObjectStore()
+
+ def test_linear(self):
+ c1, c2 = build_commit_graph(self.store, [[1], [2, 1]])
+ for obj_id in [c1.id, c2.id, c1.tree, c2.tree]:
+ self.assertIn(obj_id, self.store)
+ self.assertEqual([], c1.parents)
+ self.assertEqual([c1.id], c2.parents)
+ self.assertEqual(c1.tree, c2.tree)
+ self.assertEqual([], self.store[c1.tree].items())
+ self.assertGreater(c2.commit_time, c1.commit_time)
+
+ def test_merge(self):
+ c1, c2, c3, c4 = build_commit_graph(
+ self.store, [[1], [2, 1], [3, 1], [4, 2, 3]]
+ )
+ self.assertEqual([c2.id, c3.id], c4.parents)
+ self.assertGreater(c4.commit_time, c2.commit_time)
+ self.assertGreater(c4.commit_time, c3.commit_time)
+
+ def test_missing_parent(self):
+ self.assertRaises(
+ ValueError, build_commit_graph, self.store, [[1], [3, 2], [2, 1]]
+ )
+
+ def test_trees(self):
+ a1 = make_object(Blob, data=b"aaa1")
+ a2 = make_object(Blob, data=b"aaa2")
+ c1, c2 = build_commit_graph(
+ self.store,
+ [[1], [2, 1]],
+ trees={1: [(b"a", a1)], 2: [(b"a", a2, 0o100644)]},
+ )
+ self.assertEqual((0o100644, a1.id), self.store[c1.tree][b"a"])
+ self.assertEqual((0o100644, a2.id), self.store[c2.tree][b"a"])
+
+ def test_attrs(self):
+ c1, c2 = build_commit_graph(
+ self.store, [[1], [2, 1]], attrs={1: {"message": b"Hooray!"}}
+ )
+ self.assertEqual(b"Hooray!", c1.message)
+ self.assertEqual(b"Commit 2", c2.message)
+
+ def test_commit_time(self):
+ c1, c2, c3 = build_commit_graph(
+ self.store,
+ [[1], [2, 1], [3, 2]],
+ attrs={1: {"commit_time": 124}, 2: {"commit_time": 123}},
+ )
+ self.assertEqual(124, c1.commit_time)
+ self.assertEqual(123, c2.commit_time)
+ self.assertTrue(c2.commit_time < c1.commit_time < c3.commit_time)
blob - /dev/null
blob + 79650c423517555b700212e11291946743e18e13 (mode 644)
--- /dev/null
+++ tests/test_walk.py
+# test_walk.py -- Tests for commit walking functionality.
+# Copyright (C) 2010 Google, Inc.
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for commit walking functionality."""
+
+from itertools import permutations
+from unittest import expectedFailure
+
+from dulwich.diff_tree import CHANGE_MODIFY, CHANGE_RENAME, RenameDetector, TreeChange
+from dulwich.errors import MissingCommitError
+from dulwich.object_store import MemoryObjectStore
+from dulwich.objects import Blob, Commit
+from dulwich.walk import ORDER_TOPO, WalkEntry, Walker, _topo_reorder
+
+from . import TestCase
+from .utils import F, build_commit_graph, make_object, make_tag
+
+
+class TestWalkEntry:
+ def __init__(self, commit, changes) -> None:
+ self.commit = commit
+ self.changes = changes
+
+ def __repr__(self) -> str:
+ return f"<TestWalkEntry commit={self.commit.id}, changes={self.changes!r}>"
+
+ def __eq__(self, other):
+ if not isinstance(other, WalkEntry) or self.commit != other.commit:
+ return False
+ if self.changes is None:
+ return True
+ return self.changes == other.changes()
+
+
+class WalkerTest(TestCase):
+ def setUp(self):
+ super().setUp()
+ self.store = MemoryObjectStore()
+
+ def make_commits(self, commit_spec, **kwargs):
+ times = kwargs.pop("times", [])
+ attrs = kwargs.pop("attrs", {})
+ for i, t in enumerate(times):
+ attrs.setdefault(i + 1, {})["commit_time"] = t
+ return build_commit_graph(self.store, commit_spec, attrs=attrs, **kwargs)
+
+ def make_linear_commits(self, num_commits, **kwargs):
+ commit_spec = []
+ for i in range(1, num_commits + 1):
+ c = [i]
+ if i > 1:
+ c.append(i - 1)
+ commit_spec.append(c)
+ return self.make_commits(commit_spec, **kwargs)
+
+ def assertWalkYields(self, expected, *args, **kwargs):
+ walker = Walker(self.store, *args, **kwargs)
+ expected = list(expected)
+ for i, entry in enumerate(expected):
+ if isinstance(entry, Commit):
+ expected[i] = TestWalkEntry(entry, None)
+ actual = list(walker)
+ self.assertEqual(expected, actual)
+
+ def test_tag(self):
+ c1, c2, c3 = self.make_linear_commits(3)
+ t2 = make_tag(target=c2)
+ self.store.add_object(t2)
+ self.assertWalkYields([c2, c1], [t2.id])
+
+ def test_linear(self):
+ c1, c2, c3 = self.make_linear_commits(3)
+ self.assertWalkYields([c1], [c1.id])
+ self.assertWalkYields([c2, c1], [c2.id])
+ self.assertWalkYields([c3, c2, c1], [c3.id])
+ self.assertWalkYields([c3, c2, c1], [c3.id, c1.id])
+ self.assertWalkYields([c3, c2], [c3.id], exclude=[c1.id])
+ self.assertWalkYields([c3, c2], [c3.id, c1.id], exclude=[c1.id])
+ self.assertWalkYields([c3], [c3.id, c1.id], exclude=[c2.id])
+
+ def test_missing(self):
+ cs = list(reversed(self.make_linear_commits(20)))
+ self.assertWalkYields(cs, [cs[0].id])
+
+ # Exactly how close we can get to a missing commit depends on our
+ # implementation (in particular the choice of _MAX_EXTRA_COMMITS), but
+ # we should at least be able to walk some history in a broken repo.
+ del self.store[cs[-1].id]
+ for i in range(1, 11):
+ self.assertWalkYields(cs[:i], [cs[0].id], max_entries=i)
+ self.assertRaises(MissingCommitError, Walker, self.store, [cs[-1].id])
+
+ def test_branch(self):
+ c1, x2, x3, y4 = self.make_commits([[1], [2, 1], [3, 2], [4, 1]])
+ self.assertWalkYields([x3, x2, c1], [x3.id])
+ self.assertWalkYields([y4, c1], [y4.id])
+ self.assertWalkYields([y4, x2, c1], [y4.id, x2.id])
+ self.assertWalkYields([y4, x2], [y4.id, x2.id], exclude=[c1.id])
+ self.assertWalkYields([y4, x3], [y4.id, x3.id], exclude=[x2.id])
+ self.assertWalkYields([y4], [y4.id], exclude=[x3.id])
+ self.assertWalkYields([x3, x2], [x3.id], exclude=[y4.id])
+
+ def test_merge(self):
+ c1, c2, c3, c4 = self.make_commits([[1], [2, 1], [3, 1], [4, 2, 3]])
+ self.assertWalkYields([c4, c3, c2, c1], [c4.id])
+ self.assertWalkYields([c3, c1], [c3.id])
+ self.assertWalkYields([c2, c1], [c2.id])
+ self.assertWalkYields([c4, c3], [c4.id], exclude=[c2.id])
+ self.assertWalkYields([c4, c2], [c4.id], exclude=[c3.id])
+
+ def test_merge_of_new_branch_from_old_base(self):
+ # The commit on the branch was made at a time after any of the
+ # commits on master, but the branch was from an older commit.
+ # See also test_merge_of_old_branch
+ self.maxDiff = None
+ c1, c2, c3, c4, c5 = self.make_commits(
+ [[1], [2, 1], [3, 2], [4, 1], [5, 3, 4]],
+ times=[1, 2, 3, 4, 5],
+ )
+ self.assertWalkYields([c5, c4, c3, c2, c1], [c5.id])
+ self.assertWalkYields([c3, c2, c1], [c3.id])
+ self.assertWalkYields([c2, c1], [c2.id])
+
+ @expectedFailure
+ def test_merge_of_old_branch(self):
+ # The commit on the branch was made at a time before any of
+ # the commits on master, but it was merged into master after
+ # those commits.
+ # See also test_merge_of_new_branch_from_old_base
+ self.maxDiff = None
+ c1, c2, c3, c4, c5 = self.make_commits(
+ [[1], [2, 1], [3, 2], [4, 1], [5, 3, 4]],
+ times=[1, 3, 4, 2, 5],
+ )
+ self.assertWalkYields([c5, c4, c3, c2, c1], [c5.id])
+ self.assertWalkYields([c3, c2, c1], [c3.id])
+ self.assertWalkYields([c2, c1], [c2.id])
+
+ def test_reverse(self):
+ c1, c2, c3 = self.make_linear_commits(3)
+ self.assertWalkYields([c1, c2, c3], [c3.id], reverse=True)
+
+ def test_max_entries(self):
+ c1, c2, c3 = self.make_linear_commits(3)
+ self.assertWalkYields([c3, c2, c1], [c3.id], max_entries=3)
+ self.assertWalkYields([c3, c2], [c3.id], max_entries=2)
+ self.assertWalkYields([c3], [c3.id], max_entries=1)
+
+ def test_reverse_after_max_entries(self):
+ c1, c2, c3 = self.make_linear_commits(3)
+ self.assertWalkYields([c1, c2, c3], [c3.id], max_entries=3, reverse=True)
+ self.assertWalkYields([c2, c3], [c3.id], max_entries=2, reverse=True)
+ self.assertWalkYields([c3], [c3.id], max_entries=1, reverse=True)
+
def test_changes_one_parent(self):
    # With a single parent, each walk entry carries a flat change list.
    blob_a1 = make_object(Blob, data=b"a1")
    blob_a2 = make_object(Blob, data=b"a2")
    blob_b2 = make_object(Blob, data=b"b2")
    c1, c2 = self.make_linear_commits(
        2,
        trees={
            1: [(b"a", blob_a1)],
            2: [(b"a", blob_a2), (b"b", blob_b2)],
        },
    )
    entry1 = TestWalkEntry(c1, [TreeChange.add((b"a", F, blob_a1.id))])
    entry2 = TestWalkEntry(
        c2,
        [
            TreeChange(CHANGE_MODIFY, (b"a", F, blob_a1.id), (b"a", F, blob_a2.id)),
            TreeChange.add((b"b", F, blob_b2.id)),
        ],
    )
    self.assertWalkYields([entry2, entry1], [c2.id])
+
def test_changes_multiple_parents(self):
    blob_a1 = make_object(Blob, data=b"a1")
    blob_b2 = make_object(Blob, data=b"b2")
    blob_a3 = make_object(Blob, data=b"a3")
    c1, c2, c3 = self.make_commits(
        [[1], [2], [3, 1, 2]],
        trees={
            1: [(b"a", blob_a1)],
            2: [(b"b", blob_b2)],
            3: [(b"a", blob_a3), (b"b", blob_b2)],
        },
    )
    # "a" is a modify/add conflict between the two parents; "b" is not
    # conflicted, so only "a" shows up in the per-parent change lists.
    changes = [
        [
            TreeChange(CHANGE_MODIFY, (b"a", F, blob_a1.id), (b"a", F, blob_a3.id)),
            TreeChange.add((b"a", F, blob_a3.id)),
        ]
    ]
    self.assertWalkYields(
        [TestWalkEntry(c3, changes)], [c3.id], exclude=[c1.id, c2.id]
    )
+
def test_path_matches(self):
    walker = Walker(None, [], paths=[b"foo", b"bar", b"baz/quux"])
    # A path matches if it equals a requested path or lies beneath one.
    for path in (b"foo", b"foo/a", b"foo/a/b", b"bar", b"baz/quux", b"baz/quux/a"):
        self.assertTrue(walker._path_matches(path))
    # None, unrelated names, and mere string prefixes do not match.
    for path in (None, b"oops", b"fool", b"baz", b"baz/quu"):
        self.assertFalse(walker._path_matches(path))
+
def test_paths(self):
    blob_a1 = make_object(Blob, data=b"a1")
    blob_b2 = make_object(Blob, data=b"b2")
    blob_a3 = make_object(Blob, data=b"a3")
    blob_b3 = make_object(Blob, data=b"b3")
    c1, c2, c3 = self.make_linear_commits(
        3,
        trees={
            1: [(b"a", blob_a1)],
            2: [(b"a", blob_a1), (b"x/b", blob_b2)],
            3: [(b"a", blob_a3), (b"x/b", blob_b3)],
        },
    )

    self.assertWalkYields([c3, c2, c1], [c3.id])
    self.assertWalkYields([c3, c1], [c3.id], paths=[b"a"])
    self.assertWalkYields([c3, c2], [c3.id], paths=[b"x/b"])

    # The yielded entry reports all changes, not only those under the
    # requested paths.
    changes = [
        TreeChange(CHANGE_MODIFY, (b"a", F, blob_a1.id), (b"a", F, blob_a3.id)),
        TreeChange(CHANGE_MODIFY, (b"x/b", F, blob_b2.id), (b"x/b", F, blob_b3.id)),
    ]
    self.assertWalkYields(
        [TestWalkEntry(c3, changes)], [c3.id], max_entries=1, paths=[b"a"]
    )
+
def test_paths_subtree(self):
    blob_a = make_object(Blob, data=b"a")
    blob_b = make_object(Blob, data=b"b")
    c1, c2, c3 = self.make_linear_commits(
        3,
        trees={
            1: [(b"x/a", blob_a)],
            2: [(b"b", blob_b), (b"x/a", blob_a)],
            3: [(b"b", blob_b), (b"x/a", blob_a), (b"x/b", blob_b)],
        },
    )
    # Requesting a directory matches any change underneath it.
    self.assertWalkYields([c2], [c3.id], paths=[b"b"])
    self.assertWalkYields([c3, c1], [c3.id], paths=[b"x"])
+
def test_paths_max_entries(self):
    # Path filtering and max_entries can be combined.
    blob_a = make_object(Blob, data=b"a")
    blob_b = make_object(Blob, data=b"b")
    c1, c2 = self.make_linear_commits(
        2, trees={1: [(b"a", blob_a)], 2: [(b"a", blob_a), (b"b", blob_b)]}
    )
    self.assertWalkYields([c2], [c2.id], paths=[b"b"], max_entries=1)
    self.assertWalkYields([c1], [c1.id], paths=[b"a"], max_entries=1)
+
def test_paths_merge(self):
    blob_a1 = make_object(Blob, data=b"a1")
    blob_a2 = make_object(Blob, data=b"a2")
    blob_a3 = make_object(Blob, data=b"a3")
    x1, y2, m3, m4 = self.make_commits(
        [[1], [2], [3, 1, 2], [4, 1, 2]],
        trees={
            1: [(b"a", blob_a1)],
            2: [(b"a", blob_a2)],
            3: [(b"a", blob_a3)],  # conflicting merge result
            4: [(b"a", blob_a1)],  # non-conflicting: matches parent x1
        },
    )
    self.assertWalkYields([m3, y2, x1], [m3.id], paths=[b"a"])
    # The non-conflicting merge itself is skipped for this path.
    self.assertWalkYields([y2, x1], [m4.id], paths=[b"a"])
+
def test_changes_with_renames(self):
    blob = make_object(Blob, data=b"blob")
    c1, c2 = self.make_linear_commits(
        2, trees={1: [(b"a", blob)], 2: [(b"b", blob)]}
    )
    entry_a = (b"a", F, blob.id)
    entry_b = (b"b", F, blob.id)
    # Without a rename detector the move reports as delete + add.
    changes_without_renames = [
        TreeChange.delete(entry_a),
        TreeChange.add(entry_b),
    ]
    self.assertWalkYields(
        [TestWalkEntry(c2, changes_without_renames)],
        [c2.id],
        max_entries=1,
    )
    # With one, the same move collapses into a single rename change.
    changes_with_renames = [TreeChange(CHANGE_RENAME, entry_a, entry_b)]
    detector = RenameDetector(self.store)
    self.assertWalkYields(
        [TestWalkEntry(c2, changes_with_renames)],
        [c2.id],
        max_entries=1,
        rename_detector=detector,
    )
+
def test_follow_rename(self):
    blob = make_object(Blob, data=b"blob")
    names = [b"a", b"a", b"b", b"b", b"c", b"c"]

    trees = {i + 1: [(name, blob, F)] for i, name in enumerate(names)}
    c1, c2, c3, c4, c5, c6 = self.make_linear_commits(6, trees=trees)
    # Without follow, only the commit that introduced "c" is yielded.
    self.assertWalkYields([c5], [c6.id], paths=[b"c"])

    def entry(name):
        return (name, F, blob.id)

    # With follow=True the walk continues back through each rename.
    self.assertWalkYields(
        [
            TestWalkEntry(c5, [TreeChange(CHANGE_RENAME, entry(b"b"), entry(b"c"))]),
            TestWalkEntry(c3, [TreeChange(CHANGE_RENAME, entry(b"a"), entry(b"b"))]),
            TestWalkEntry(c1, [TreeChange.add(entry(b"a"))]),
        ],
        [c6.id],
        paths=[b"c"],
        follow=True,
    )
+
def test_follow_rename_remove_path(self):
    blob = make_object(Blob, data=b"blob")
    _, _, _, c4, c5, c6 = self.make_linear_commits(
        6,
        trees={
            1: [(b"a", blob), (b"c", blob)],
            2: [],
            3: [],
            4: [(b"b", blob)],
            5: [(b"a", blob)],
            6: [(b"c", blob)],
        },
    )

    def entry(name):
        return (name, F, blob.id)

    # Once the followed path changes to "b", earlier history of "a" and
    # "c" is no longer of interest.
    self.assertWalkYields(
        [
            TestWalkEntry(c6, [TreeChange(CHANGE_RENAME, entry(b"a"), entry(b"c"))]),
            TestWalkEntry(c5, [TreeChange(CHANGE_RENAME, entry(b"b"), entry(b"a"))]),
            TestWalkEntry(c4, [TreeChange.add(entry(b"b"))]),
        ],
        [c6.id],
        paths=[b"c"],
        follow=True,
    )
+
def test_since(self):
    # Commit times are 100, 200, 300; since= is an inclusive lower bound.
    c1, c2, c3 = self.make_linear_commits(3)
    expectations = [
        (-1, [c3, c2, c1]),
        (0, [c3, c2, c1]),
        (1, [c3, c2]),
        (99, [c3, c2]),
        (100, [c3, c2]),
        (101, [c3]),
        (199, [c3]),
        (200, [c3]),
        (201, []),
        (300, []),
    ]
    for since, expected in expectations:
        self.assertWalkYields(expected, [c3.id], since=since)
+
def test_until(self):
    # Commit times are 100, 200, 300; until= is an inclusive upper bound.
    c1, c2, c3 = self.make_linear_commits(3)
    expectations = [
        (-1, []),
        (0, [c1]),
        (1, [c1]),
        (99, [c1]),
        (100, [c2, c1]),
        (101, [c2, c1]),
        (199, [c2, c1]),
        (200, [c3, c2, c1]),
        (201, [c3, c2, c1]),
        (300, [c3, c2, c1]),
    ]
    for until, expected in expectations:
        self.assertWalkYields(expected, [c3.id], until=until)
+
def test_since_until(self):
    # Combining both bounds restricts the walk to the intersection.
    c1, c2, c3 = self.make_linear_commits(3)
    self.assertWalkYields([], [c3.id], since=100, until=99)
    self.assertWalkYields([c3, c2, c1], [c3.id], since=-1, until=201)
    self.assertWalkYields([c2], [c3.id], since=100, until=100)
    self.assertWalkYields([c2], [c3.id], since=50, until=150)
+
def test_since_over_scan(self):
    commits = self.make_linear_commits(11, times=[9, 0, 1, 2, 3, 4, 5, 8, 6, 7, 9])
    c8, _, c10, c11 = commits[-4:]
    del self.store[commits[0].id]
    # c9 is older than the cutoff but out of order with its parent, so
    # the walker must scan past it to reach c8.  c1 would also match,
    # but it has been deleted and must be pruned even while
    # over-scanning.
    self.assertWalkYields([c11, c10, c8], [c11.id], since=7)
+
def assertTopoOrderEqual(self, expected_commits, commits):
    # Run `commits` through _topo_reorder and compare the resulting
    # commit ids against `expected_commits`.
    entries = [TestWalkEntry(c, None) for c in commits]
    actual_ids = [entry.commit.id for entry in _topo_reorder(entries)]
    self.assertEqual([c.id for c in expected_commits], actual_ids)
+
def test_topo_reorder_linear(self):
    # Any permutation of a linear history reorders to newest-first.
    commits = self.make_linear_commits(5)
    commits.reverse()
    for perm in permutations(commits):
        self.assertTopoOrderEqual(commits, perm)
+
def test_topo_reorder_multiple_parents(self):
    c1, c2, c3 = self.make_commits([[1], [2], [3, 1, 2]])
    # Already topologically sorted, so output is purely FIFO.
    self.assertTopoOrderEqual([c3, c2, c1], [c3, c2, c1])
    self.assertTopoOrderEqual([c3, c1, c2], [c3, c1, c2])

    # Emitting c3 forces one of its parents to be yielded first.
    self.assertTopoOrderEqual([c3, c2, c1], [c2, c3, c1])
    self.assertTopoOrderEqual([c3, c1, c2], [c1, c3, c2])

    # Emitting c3 forces both parents to be yielded first.
    self.assertTopoOrderEqual([c3, c2, c1], [c1, c2, c3])
    self.assertTopoOrderEqual([c3, c2, c1], [c2, c1, c3])
+
def test_topo_reorder_multiple_children(self):
    c1, c2, c3 = self.make_commits([[1], [2, 1], [3, 1]])

    # c2 and c3 keep their relative order, but the shared parent c1
    # always moves to the end.
    self.assertTopoOrderEqual([c3, c2, c1], [c3, c2, c1])
    self.assertTopoOrderEqual([c3, c2, c1], [c3, c1, c2])
    self.assertTopoOrderEqual([c3, c2, c1], [c1, c3, c2])

    self.assertTopoOrderEqual([c2, c3, c1], [c2, c3, c1])
    self.assertTopoOrderEqual([c2, c3, c1], [c2, c1, c3])
    self.assertTopoOrderEqual([c2, c3, c1], [c1, c2, c3])
+
def test_out_of_order_children(self):
    # c1 is newer than its child c2, so date order and topological
    # order disagree.
    c1, c2, c3, c4, c5 = self.make_commits(
        [[1], [2, 1], [3, 2], [4, 1], [5, 3, 4]], times=[2, 1, 3, 4, 5]
    )
    self.assertWalkYields([c5, c4, c3, c1, c2], [c5.id])
    self.assertWalkYields([c5, c4, c3, c2, c1], [c5.id], order=ORDER_TOPO)
+
def test_out_of_order_with_exclude(self):
    # Create the following graph:
    # c1-------x2---m6
    #   \           /
    #    \-y3--y4-/--y5
    # Due to clock skew, y5 is the oldest commit.
    c1, x2, y3, y4, y5, m6 = self.make_commits(
        [[1], [2, 1], [3, 1], [4, 3], [5, 4], [6, 2, 4]],
        times=[2, 3, 4, 5, 1, 6],
    )
    self.assertWalkYields([m6, y4, y3, x2, c1], [m6.id])
    # c1..y4 must be excluded even though they are popped from the
    # priority queue long before y5 is reached.
    self.assertWalkYields([m6, x2], [m6.id], exclude=[y5.id])
+
def test_empty_walk(self):
    # Excluding the sole head leaves nothing to yield.
    c1, c2, c3 = self.make_linear_commits(3)
    self.assertWalkYields([], [c3.id], exclude=[c3.id])
+
+
class WalkEntryTest(TestCase):
    """Tests exercising WalkEntry.changes() directly."""

    def setUp(self):
        super().setUp()
        self.store = MemoryObjectStore()

    def make_commits(self, commit_spec, **kwargs):
        # Translate a times=[...] keyword into per-commit commit_time
        # attributes for build_commit_graph.
        times = kwargs.pop("times", [])
        attrs = kwargs.pop("attrs", {})
        for i, commit_time in enumerate(times):
            attrs.setdefault(i + 1, {})["commit_time"] = commit_time
        return build_commit_graph(self.store, commit_spec, attrs=attrs, **kwargs)

    def make_linear_commits(self, num_commits, **kwargs):
        # Build a spec of the form [[1], [2, 1], [3, 2], ...].
        commit_spec = []
        for i in range(1, num_commits + 1):
            spec = [i]
            if i > 1:
                spec.append(i - 1)
            commit_spec.append(spec)
        return self.make_commits(commit_spec, **kwargs)

    def test_all_changes(self):
        # A commit with two files in different subdirectories.
        blob_a = make_object(Blob, data=b"a")
        blob_b = make_object(Blob, data=b"b")
        c1 = self.make_linear_commits(
            1,
            trees={1: [(b"x/a", blob_a), (b"y/b", blob_b)]},
        )[0]

        # Get the WalkEntry for the commit.
        walker = Walker(self.store, c1.id)
        walker_entry = next(iter(walker))
        changes = walker_entry.changes()

        # Both additions are reported.
        entry_a = (b"x/a", F, blob_a.id)
        entry_b = (b"y/b", F, blob_b.id)
        self.assertEqual(
            [TreeChange.add(entry_a), TreeChange.add(entry_b)],
            changes,
        )

    def test_all_with_merge(self):
        blob_a = make_object(Blob, data=b"a")
        blob_a2 = make_object(Blob, data=b"a2")
        blob_b = make_object(Blob, data=b"b")
        blob_b2 = make_object(Blob, data=b"b2")
        x1, y2, m3 = self.make_commits(
            [[1], [2], [3, 1, 2]],
            trees={
                1: [(b"x/a", blob_a)],
                2: [(b"y/b", blob_b)],
                3: [(b"x/a", blob_a2), (b"y/b", blob_b2)],
            },
        )

        # Get the WalkEntry for the merge commit.
        walker = Walker(self.store, m3.id)
        entries = list(walker)
        walker_entry = entries[0]
        self.assertEqual(walker_entry.commit.id, m3.id)
        changes = walker_entry.changes()
        # One change list per parent.
        self.assertEqual(2, len(changes))

        entry_a = (b"x/a", F, blob_a.id)
        entry_a2 = (b"x/a", F, blob_a2.id)
        entry_b = (b"y/b", F, blob_b.id)
        entry_b2 = (b"y/b", F, blob_b2.id)
        self.assertEqual(
            [
                [
                    TreeChange(CHANGE_MODIFY, entry_a, entry_a2),
                    TreeChange.add(entry_a2),
                ],
                [
                    TreeChange.add(entry_b2),
                    TreeChange(CHANGE_MODIFY, entry_b, entry_b2),
                ],
            ],
            changes,
        )

    def test_filter_changes(self):
        # A commit with two files in different subdirectories.
        blob_a = make_object(Blob, data=b"a")
        blob_b = make_object(Blob, data=b"b")
        c1 = self.make_linear_commits(
            1,
            trees={1: [(b"x/a", blob_a), (b"y/b", blob_b)]},
        )[0]

        # Restricting by path_prefix strips the prefix from the result.
        walker = Walker(self.store, c1.id)
        walker_entry = next(iter(walker))
        changes = walker_entry.changes(path_prefix=b"x")

        entry_a = (b"a", F, blob_a.id)
        self.assertEqual(
            [TreeChange.add(entry_a)],
            changes,
        )

    def test_filter_with_merge(self):
        blob_a = make_object(Blob, data=b"a")
        blob_a2 = make_object(Blob, data=b"a2")
        blob_b = make_object(Blob, data=b"b")
        blob_b2 = make_object(Blob, data=b"b2")
        x1, y2, m3 = self.make_commits(
            [[1], [2], [3, 1, 2]],
            trees={
                1: [(b"x/a", blob_a)],
                2: [(b"y/b", blob_b)],
                3: [(b"x/a", blob_a2), (b"y/b", blob_b2)],
            },
        )

        # Get the WalkEntry for the merge commit and filter to "x".
        walker = Walker(self.store, m3.id)
        entries = list(walker)
        walker_entry = entries[0]
        self.assertEqual(walker_entry.commit.id, m3.id)
        changes = walker_entry.changes(b"x")
        self.assertEqual(1, len(changes))

        entry_a = (b"a", F, blob_a.id)
        entry_a2 = (b"a", F, blob_a2.id)
        self.assertEqual(
            [[TreeChange(CHANGE_MODIFY, entry_a, entry_a2)]],
            changes,
        )
blob - /dev/null
blob + d3ea62d6030b4d807c5d1c44f677f0c58c309c12 (mode 644)
--- /dev/null
+++ tests/test_web.py
+# test_web.py -- Tests for the git HTTP server
+# Copyright (C) 2010 Google, Inc.
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for the Git HTTP server."""
+
+import gzip
+import os
+import re
+from io import BytesIO
+from typing import Type
+
+from dulwich.object_store import MemoryObjectStore
+from dulwich.objects import Blob
+from dulwich.repo import BaseRepo, MemoryRepo
+from dulwich.server import DictBackend
+from dulwich.web import (
+ HTTP_ERROR,
+ HTTP_FORBIDDEN,
+ HTTP_NOT_FOUND,
+ HTTP_OK,
+ GunzipFilter,
+ HTTPGitApplication,
+ HTTPGitRequest,
+ _LengthLimitedFile,
+ get_idx_file,
+ get_info_packs,
+ get_info_refs,
+ get_loose_object,
+ get_pack_file,
+ get_text_file,
+ handle_service_request,
+ send_file,
+)
+
+from . import TestCase
+from .utils import make_object, make_tag
+
+
class MinimalistWSGIInputStream:
    """WSGI input stream exposing only 'read()' (no 'seek()'/'tell()')."""

    def __init__(self, data) -> None:
        self.data = data  # entire payload, held in memory
        self.pos = 0  # current read offset

    def read(self, howmuch):
        # Return the next `howmuch` bytes; empty bytes once exhausted.
        start = self.pos
        end = start + howmuch
        if start >= len(self.data):
            return b""
        self.pos = end
        return self.data[start:end]
+
+
class MinimalistWSGIInputStream2(MinimalistWSGIInputStream):
    """WSGI input stream whose 'seek()' and 'tell()' exist but are broken."""

    def seek(self, pos):
        # Present but unusable, to mimic defunct stream implementations.
        raise NotImplementedError

    def tell(self):
        raise NotImplementedError
+
+
class TestHTTPGitRequest(HTTPGitRequest):
    """HTTPGitRequest that records caching calls instead of setting headers."""

    def __init__(self, *args, **kwargs) -> None:
        HTTPGitRequest.__init__(self, *args, **kwargs)
        # None until either nocache() or cache_forever() is called.
        self.cached = None

    def nocache(self):
        self.cached = False

    def cache_forever(self):
        self.cached = True
+
+
class WebTestCase(TestCase):
    """Base TestCase with useful instance vars and utility functions."""

    _req_class: Type[HTTPGitRequest] = TestHTTPGitRequest

    def setUp(self):
        super().setUp()
        self._environ = {}
        self._req = self._req_class(
            self._environ, self._start_response, handlers=self._handlers()
        )
        self._status = None
        self._headers = []
        self._output = BytesIO()

    def _start_response(self, status, headers):
        # Capture status and headers; body chunks written via the
        # returned callable accumulate in self._output.
        self._status = status
        self._headers = list(headers)
        return self._output.write

    def _handlers(self):
        # Subclasses override to supply smart-protocol handlers.
        return None

    def assertContentTypeEquals(self, expected):
        self.assertIn(("Content-Type", expected), self._headers)
+
+
def _test_backend(objects, refs=None, named_files=None):
    # Build a DictBackend around an in-memory repo seeded with the given
    # objects, refs and named control files.
    refs = refs or {}
    named_files = named_files or {}
    repo = MemoryRepo.init_bare(objects, refs)
    for path, contents in named_files.items():
        repo._put_named_file(path, contents)
    return DictBackend({"/": repo})
+
+
class DumbHandlersTestCase(WebTestCase):
    """Tests for the dumb-protocol (plain HTTP) request handlers."""

    def test_send_file_not_found(self):
        list(send_file(self._req, None, "text/plain"))
        self.assertEqual(HTTP_NOT_FOUND, self._status)

    def test_send_file(self):
        f = BytesIO(b"foobar")
        output = b"".join(send_file(self._req, f, "some/thing"))
        self.assertEqual(b"foobar", output)
        self.assertEqual(HTTP_OK, self._status)
        self.assertContentTypeEquals("some/thing")
        self.assertTrue(f.closed)

    def test_send_file_buffered(self):
        # Larger files are yielded in buffer-sized chunks.
        bufsize = 10240
        xs = b"x" * bufsize
        f = BytesIO(2 * xs)
        self.assertEqual([xs, xs], list(send_file(self._req, f, "some/thing")))
        self.assertEqual(HTTP_OK, self._status)
        self.assertContentTypeEquals("some/thing")
        self.assertTrue(f.closed)

    def test_send_file_error(self):
        class TestFile:
            def __init__(self, exc_class) -> None:
                self.closed = False
                self._exc_class = exc_class

            def read(self, size=-1):
                raise self._exc_class

            def close(self):
                self.closed = True

        # IOErrors become an HTTP error response...
        f = TestFile(IOError)
        list(send_file(self._req, f, "some/thing"))
        self.assertEqual(HTTP_ERROR, self._status)
        self.assertTrue(f.closed)
        self.assertFalse(self._req.cached)

        # ...but non-IOErrors are reraised.
        f = TestFile(AttributeError)
        self.assertRaises(AttributeError, list, send_file(self._req, f, "some/thing"))
        self.assertTrue(f.closed)
        self.assertFalse(self._req.cached)

    def test_get_text_file(self):
        backend = _test_backend([], named_files={"description": b"foo"})
        match = re.search(".*", "description")
        output = b"".join(get_text_file(self._req, backend, match))
        self.assertEqual(b"foo", output)
        self.assertEqual(HTTP_OK, self._status)
        self.assertContentTypeEquals("text/plain")
        self.assertFalse(self._req.cached)

    def test_get_loose_object(self):
        blob = make_object(Blob, data=b"foo")
        backend = _test_backend([blob])
        match = re.search("^(..)(.{38})$", blob.id.decode("ascii"))
        output = b"".join(get_loose_object(self._req, backend, match))
        self.assertEqual(blob.as_legacy_object(), output)
        self.assertEqual(HTTP_OK, self._status)
        self.assertContentTypeEquals("application/x-git-loose-object")
        # Loose objects are immutable, so they may be cached forever.
        self.assertTrue(self._req.cached)

    def test_get_loose_object_missing(self):
        match = re.search("^(..)(.{38})$", "1" * 40)
        list(get_loose_object(self._req, _test_backend([]), match))
        self.assertEqual(HTTP_NOT_FOUND, self._status)

    def test_get_loose_object_error(self):
        blob = make_object(Blob, data=b"foo")
        backend = _test_backend([blob])
        match = re.search("^(..)(.{38})$", blob.id.decode("ascii"))

        def as_legacy_object_error(self):
            raise OSError

        # Patch Blob.as_legacy_object to fail, restoring it afterwards.
        self.addCleanup(setattr, Blob, "as_legacy_object", Blob.as_legacy_object)
        Blob.as_legacy_object = as_legacy_object_error
        list(get_loose_object(self._req, backend, match))
        self.assertEqual(HTTP_ERROR, self._status)

    def test_get_pack_file(self):
        pack_name = os.path.join("objects", "pack", "pack-%s.pack" % ("1" * 40))
        backend = _test_backend([], named_files={pack_name: b"pack contents"})
        match = re.search(".*", pack_name)
        output = b"".join(get_pack_file(self._req, backend, match))
        self.assertEqual(b"pack contents", output)
        self.assertEqual(HTTP_OK, self._status)
        self.assertContentTypeEquals("application/x-git-packed-objects")
        self.assertTrue(self._req.cached)

    def test_get_idx_file(self):
        idx_name = os.path.join("objects", "pack", "pack-%s.idx" % ("1" * 40))
        backend = _test_backend([], named_files={idx_name: b"idx contents"})
        match = re.search(".*", idx_name)
        output = b"".join(get_idx_file(self._req, backend, match))
        self.assertEqual(b"idx contents", output)
        self.assertEqual(HTTP_OK, self._status)
        self.assertContentTypeEquals("application/x-git-packed-objects-toc")
        self.assertTrue(self._req.cached)

    def test_get_info_refs(self):
        self._environ["QUERY_STRING"] = ""

        blob1 = make_object(Blob, data=b"1")
        blob2 = make_object(Blob, data=b"2")
        blob3 = make_object(Blob, data=b"3")

        tag1 = make_tag(blob2, name=b"tag-tag")

        objects = [blob1, blob2, blob3, tag1]
        refs = {
            b"HEAD": b"000",
            b"refs/heads/master": blob1.id,
            b"refs/tags/tag-tag": tag1.id,
            b"refs/tags/blob-tag": blob3.id,
        }
        backend = _test_backend(objects, refs=refs)

        # HEAD is skipped; the annotated tag gets a peeled ^{} line.
        match = re.search(".*", "//info/refs")
        self.assertEqual(
            [
                blob1.id + b"\trefs/heads/master\n",
                blob3.id + b"\trefs/tags/blob-tag\n",
                tag1.id + b"\trefs/tags/tag-tag\n",
                blob2.id + b"\trefs/tags/tag-tag^{}\n",
            ],
            list(get_info_refs(self._req, backend, match)),
        )
        self.assertEqual(HTTP_OK, self._status)
        self.assertContentTypeEquals("text/plain")
        self.assertFalse(self._req.cached)

    def test_get_info_refs_not_found(self):
        self._environ["QUERY_STRING"] = ""

        backend = _test_backend([], refs={})

        match = re.search("info/refs", "/foo/info/refs")
        self.assertEqual(
            [b"No git repository was found at /foo"],
            list(get_info_refs(self._req, backend, match)),
        )
        self.assertEqual(HTTP_NOT_FOUND, self._status)
        self.assertContentTypeEquals("text/plain")

    def test_get_info_packs(self):
        class TestPackData:
            def __init__(self, sha) -> None:
                self.filename = "pack-%s.pack" % sha

        class TestPack:
            def __init__(self, sha) -> None:
                self.data = TestPackData(sha)

        packs = [TestPack(str(i) * 40) for i in range(1, 4)]

        class TestObjectStore(MemoryObjectStore):
            # `packs` is a property on the base class, so it must be
            # overridden rather than assigned.
            @property
            def packs(self):
                return packs

        store = TestObjectStore()
        repo = BaseRepo(store, None)
        backend = DictBackend({"/": repo})
        match = re.search(".*", "//info/packs")
        output = b"".join(get_info_packs(self._req, backend, match))
        expected = b"".join(
            [(b"P pack-" + s + b".pack\n") for s in [b"1" * 40, b"2" * 40, b"3" * 40]]
        )
        self.assertEqual(expected, output)
        self.assertEqual(HTTP_OK, self._status)
        self.assertContentTypeEquals("text/plain")
        self.assertFalse(self._req.cached)
+
+
class SmartHandlersTestCase(WebTestCase):
    """Tests for the smart-protocol (upload-pack) request handlers."""

    class _TestUploadPackHandler:
        def __init__(
            self,
            backend,
            args,
            proto,
            stateless_rpc=None,
            advertise_refs=False,
        ) -> None:
            self.args = args
            self.proto = proto
            self.stateless_rpc = stateless_rpc
            self.advertise_refs = advertise_refs

        def handle(self):
            # Echo the request body back so the test can see it arrived.
            self.proto.write(b"handled input: " + self.proto.recv(1024))

    def _make_handler(self, *args, **kwargs):
        # Keep a reference so tests can inspect the constructed handler.
        self._handler = self._TestUploadPackHandler(*args, **kwargs)
        return self._handler

    def _handlers(self):
        return {b"git-upload-pack": self._make_handler}

    def test_handle_service_request_unknown(self):
        match = re.search(".*", "/git-evil-handler")
        content = list(handle_service_request(self._req, "backend", match))
        self.assertEqual(HTTP_FORBIDDEN, self._status)
        # The forbidden response must not echo the service name back.
        self.assertNotIn(b"git-evil-handler", b"".join(content))
        self.assertFalse(self._req.cached)

    def _run_handle_service_request(self, content_length=None):
        self._environ["wsgi.input"] = BytesIO(b"foo")
        if content_length is not None:
            self._environ["CONTENT_LENGTH"] = content_length
        match = re.search(".*", "/git-upload-pack")

        class Backend:
            def open_repository(self, path):
                return None

        handler_output = b"".join(handle_service_request(self._req, Backend(), match))
        write_output = self._output.getvalue()
        # All output must go through the write callback, none yielded.
        self.assertEqual(b"", handler_output)
        self.assertEqual(b"handled input: foo", write_output)
        self.assertContentTypeEquals("application/x-git-upload-pack-result")
        self.assertFalse(self._handler.advertise_refs)
        self.assertTrue(self._handler.stateless_rpc)
        self.assertFalse(self._req.cached)

    def test_handle_service_request(self):
        self._run_handle_service_request()

    def test_handle_service_request_with_length(self):
        self._run_handle_service_request(content_length="3")

    def test_handle_service_request_empty_length(self):
        self._run_handle_service_request(content_length="")

    def test_get_info_refs_unknown(self):
        self._environ["QUERY_STRING"] = "service=git-evil-handler"

        class Backend:
            def open_repository(self, url):
                return None

        match = re.search(".*", "/git-evil-pack")
        content = list(get_info_refs(self._req, Backend(), match))
        self.assertNotIn(b"git-evil-handler", b"".join(content))
        self.assertEqual(HTTP_FORBIDDEN, self._status)
        self.assertFalse(self._req.cached)

    def test_get_info_refs(self):
        self._environ["wsgi.input"] = BytesIO(b"foo")
        self._environ["QUERY_STRING"] = "service=git-upload-pack"

        class Backend:
            def open_repository(self, url):
                return None

        match = re.search(".*", "/git-upload-pack")
        handler_output = b"".join(get_info_refs(self._req, Backend(), match))
        write_output = self._output.getvalue()
        self.assertEqual(
            (
                b"001e# service=git-upload-pack\n"
                b"0000"
                # input is ignored by the handler
                b"handled input: "
            ),
            write_output,
        )
        # All output must go through the write callback, none yielded.
        self.assertEqual(b"", handler_output)
        self.assertTrue(self._handler.advertise_refs)
        self.assertTrue(self._handler.stateless_rpc)
        self.assertFalse(self._req.cached)
+
+
class LengthLimitedFileTestCase(TestCase):
    """Tests for the _LengthLimitedFile read-capping wrapper."""

    def test_no_cutoff(self):
        f = _LengthLimitedFile(BytesIO(b"foobar"), 1024)
        self.assertEqual(b"foobar", f.read())

    def test_cutoff(self):
        # Reads stop at the limit even when more data is available.
        f = _LengthLimitedFile(BytesIO(b"foobar"), 3)
        self.assertEqual(b"foo", f.read())
        self.assertEqual(b"", f.read())

    def test_multiple_reads(self):
        # The limit applies across successive reads, not per read.
        f = _LengthLimitedFile(BytesIO(b"foobar"), 3)
        self.assertEqual(b"fo", f.read(2))
        self.assertEqual(b"o", f.read(2))
        self.assertEqual(b"", f.read())
+
+
class HTTPGitRequestTestCase(WebTestCase):
    """Tests for the actual cache-header contents of HTTPGitRequest."""

    # Use the real class, not the cache-recording test double.
    _req_class = HTTPGitRequest

    def test_not_found(self):
        self._req.cache_forever()  # cache headers should be discarded
        message = "Something not found"
        self.assertEqual(message.encode("ascii"), self._req.not_found(message))
        self.assertEqual(HTTP_NOT_FOUND, self._status)
        self.assertEqual({("Content-Type", "text/plain")}, set(self._headers))

    def test_forbidden(self):
        self._req.cache_forever()  # cache headers should be discarded
        message = "Something not found"
        self.assertEqual(message.encode("ascii"), self._req.forbidden(message))
        self.assertEqual(HTTP_FORBIDDEN, self._status)
        self.assertEqual({("Content-Type", "text/plain")}, set(self._headers))

    def test_respond_ok(self):
        self._req.respond()
        self.assertEqual([], self._headers)
        self.assertEqual(HTTP_OK, self._status)

    def test_respond(self):
        self._req.nocache()
        self._req.respond(
            status=402,
            content_type="some/type",
            headers=[("X-Foo", "foo"), ("X-Bar", "bar")],
        )
        # Custom headers are merged with the no-cache header set.
        self.assertEqual(
            {
                ("X-Foo", "foo"),
                ("X-Bar", "bar"),
                ("Content-Type", "some/type"),
                ("Expires", "Fri, 01 Jan 1980 00:00:00 GMT"),
                ("Pragma", "no-cache"),
                ("Cache-Control", "no-cache, max-age=0, must-revalidate"),
            },
            set(self._headers),
        )
        self.assertEqual(402, self._status)
+
+
class HTTPGitApplicationTestCase(TestCase):
    """Tests for routing in the HTTPGitApplication WSGI app."""

    def setUp(self):
        super().setUp()
        self._app = HTTPGitApplication("backend")

        self._environ = {
            "PATH_INFO": "/foo",
            "REQUEST_METHOD": "GET",
        }

    def _test_handler(self, req, backend, mat):
        # Verifies the interface the application presents to handlers.
        self.assertEqual(self._environ, req.environ)
        self.assertEqual("backend", backend)
        self.assertEqual("/foo", mat.group(0))
        return "output"

    def _add_handler(self, app):
        method = self._environ["REQUEST_METHOD"]
        app.services = {
            (method, re.compile("/foo$")): self._test_handler,
        }

    def test_call(self):
        self._add_handler(self._app)
        self.assertEqual("output", self._app(self._environ, None))

    def test_fallback_app(self):
        # Unmatched requests are delegated to the fallback application.
        def test_app(environ, start_response):
            return "output"

        app = HTTPGitApplication("backend", fallback_app=test_app)
        self.assertEqual("output", app(self._environ, None))
+ self.assertEqual("output", app(self._environ, None))
+
+
class GunzipTestCase(HTTPGitApplicationTestCase):
    __doc__ = """TestCase for testing the GunzipFilter, ensuring the wsgi.input
    is correctly decompressed and headers are corrected.
    """
    # Reuse the docstring as a compressible payload for the tests.
    example_text = __doc__.encode("ascii")

    def setUp(self):
        super().setUp()
        self._app = GunzipFilter(self._app)
        self._environ["HTTP_CONTENT_ENCODING"] = "gzip"
        self._environ["REQUEST_METHOD"] = "POST"

    def _get_zstream(self, text):
        # Gzip-compress `text`, returning the stream and its length.
        zstream = BytesIO()
        zfile = gzip.GzipFile(fileobj=zstream, mode="wb")
        zfile.write(text)
        zfile.close()
        zlength = zstream.tell()
        zstream.seek(0)
        return zstream, zlength

    def _test_call(self, orig, zstream, zlength):
        self._add_handler(self._app.app)
        self.assertLess(zlength, len(orig))
        self.assertEqual(self._environ["HTTP_CONTENT_ENCODING"], "gzip")
        self._environ["CONTENT_LENGTH"] = zlength
        self._environ["wsgi.input"] = zstream
        self._app(self._environ, None)
        # The filter must replace wsgi.input with a decompressed stream
        # and drop the now-stale length/encoding headers.
        buf = self._environ["wsgi.input"]
        self.assertIsNot(buf, zstream)
        buf.seek(0)
        self.assertEqual(orig, buf.read())
        self.assertIs(None, self._environ.get("CONTENT_LENGTH"))
        self.assertNotIn("HTTP_CONTENT_ENCODING", self._environ)

    def test_call(self):
        self._test_call(self.example_text, *self._get_zstream(self.example_text))

    def test_call_no_seek(self):
        """This ensures that the gunzipping code doesn't require any methods on
        'wsgi.input' except for '.read()'.  (In particular, it shouldn't
        require '.seek()'. See https://github.com/jelmer/dulwich/issues/140.).
        """
        zstream, zlength = self._get_zstream(self.example_text)
        self._test_call(
            self.example_text,
            MinimalistWSGIInputStream(zstream.read()),
            zlength,
        )

    def test_call_no_working_seek(self):
        """Similar to 'test_call_no_seek', but this time the methods are available
        (but defunct). See https://github.com/jonashaag/klaus/issues/154.
        """
        zstream, zlength = self._get_zstream(self.example_text)
        self._test_call(
            self.example_text,
            MinimalistWSGIInputStream2(zstream.read()),
            zlength,
        )
blob - /dev/null
blob + 13fe5a4542c3d02b3dcf7fd70194a01a39c6f7fd (mode 644)
--- /dev/null
+++ tests/utils.py
+# utils.py -- Test utilities for Dulwich.
+# Copyright (C) 2010 Google, Inc.
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as public by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Utility functions common to Dulwich tests."""
+
+import datetime
+import os
+import shutil
+import tempfile
+import time
+import types
+import warnings
+
+from dulwich.index import commit_tree
+from dulwich.objects import Commit, FixedSha, Tag, object_class
+from dulwich.pack import (
+ DELTA_TYPES,
+ OFS_DELTA,
+ REF_DELTA,
+ SHA1Writer,
+ create_delta,
+ obj_sha,
+ write_pack_header,
+ write_pack_object,
+)
+from dulwich.repo import Repo
+
+from . import SkipTest
+
+# Plain files are very frequently used in tests, so let the mode be very short.
+F = 0o100644 # Shorthand mode for Files.
+
+
+def open_repo(name, temp_dir=None):
+ """Open a copy of a repo in a temporary directory.
+
+ Use this function for accessing repos in testdata/repos to avoid
+ accidentally or intentionally modifying those repos in place. Use
+ tear_down_repo to delete any temp files created.
+
+ Args:
+ name: The name of the repository, relative to
+ testdata/repos
+ temp_dir: temporary directory to initialize to. If not provided, a
+ temporary directory will be created.
+ Returns: An initialized Repo object that lives in a temporary directory.
+ """
+ if temp_dir is None:
+ temp_dir = tempfile.mkdtemp()
+ repo_dir = os.path.join(
+ os.path.dirname(__file__), "..", "testdata", "repos", name
+ )
+ temp_repo_dir = os.path.join(temp_dir, name)
+ # symlinks=True copies symlinks in the fixture repo as symlinks rather
+ # than following them.
+ shutil.copytree(repo_dir, temp_repo_dir, symlinks=True)
+ return Repo(temp_repo_dir)
+
+
+def tear_down_repo(repo):
+ """Tear down a test repository created by open_repo.
+
+ Closes the repo and deletes the temporary directory holding it.
+ """
+ repo.close()
+ # open_repo() put the repo at <temp_dir>/<name>; remove the containing
+ # temp_dir, not just the repo directory itself.
+ temp_dir = os.path.dirname(repo.path.rstrip(os.sep))
+ shutil.rmtree(temp_dir)
+
+
+def make_object(cls, **attrs):
+ """Make an object for testing and assign some members.
+
+ This method creates a new subclass to allow arbitrary attribute
+ reassignment, which is not otherwise possible with objects having
+ __slots__.
+
+ Args:
+ attrs: dict of attributes to set on the new object.
+ Returns: A newly initialized object of type cls.
+ """
+
+ class TestObject(cls):
+ """Class that inherits from the given class, but without __slots__.
+
+ Note that classes with __slots__ can't have arbitrary attributes
+ monkey-patched in, so this is a class that is exactly the same only
+ with a __dict__ instead of __slots__.
+ """
+
+ # Give the subclass a recognizable name so failures mention the
+ # wrapped type (e.g. "TestObject_Commit").
+ TestObject.__name__ = "TestObject_" + cls.__name__
+
+ obj = TestObject()
+ for name, value in attrs.items():
+ if name == "id":
+ # id property is read-only, so we overwrite sha instead.
+ # Bind the FixedSha in a local first; the lambda closes over it.
+ sha = FixedSha(value)
+ obj.sha = lambda: sha
+ else:
+ setattr(obj, name, value)
+ return obj
+
+
+def make_commit(**attrs):
+ """Make a Commit object with a default set of members.
+
+ Args:
+ attrs: dict of attributes to overwrite from the default values.
+ Returns: A newly initialized Commit object.
+ """
+ default_time = 1262304000 # 2010-01-01 00:00:00
+ # Field values are bytes, matching how dulwich stores object fields.
+ all_attrs = {
+ "author": b"Test Author <test@nodomain.com>",
+ "author_time": default_time,
+ "author_timezone": 0,
+ "committer": b"Test Committer <test@nodomain.com>",
+ "commit_time": default_time,
+ "commit_timezone": 0,
+ "message": b"Test message.",
+ "parents": [],
+ "tree": b"0" * 40,
+ }
+ all_attrs.update(attrs)
+ return make_object(Commit, **all_attrs)
+
+
+def make_tag(target, **attrs):
+ """Make a Tag object with a default set of values.
+
+ Args:
+ target: object to be tagged (Commit, Blob, Tree, etc)
+ attrs: dict of attributes to overwrite from the default values.
+ Returns: A newly initialized Tag object.
+ """
+ target_id = target.id
+ # Map the target's type_name back to its object class for the tag's
+ # "object" field.
+ target_type = object_class(target.type_name)
+ default_time = int(time.mktime(datetime.datetime(2010, 1, 1).timetuple()))
+ all_attrs = {
+ "tagger": b"Test Author <test@nodomain.com>",
+ "tag_time": default_time,
+ "tag_timezone": 0,
+ "message": b"Test message.",
+ "object": (target_type, target_id),
+ "name": b"Test Tag",
+ }
+ all_attrs.update(attrs)
+ return make_object(Tag, **all_attrs)
+
+
+def functest_builder(method, func):
+ """Generate a test method that tests the given function.
+
+ Args:
+ method: The shared test logic, taking (self, func_impl).
+ func: The function implementation to pass to method.
+ Returns: A function usable as a TestCase test method.
+ """
+
+ def do_test(self):
+ method(self, func)
+
+ return do_test
+
+
+def ext_functest_builder(method, func):
+ """Generate a test method that tests the given extension function.
+
+ This is intended to generate test methods that test both a pure-Python
+ version and an extension version using common test code. The extension test
+ will raise SkipTest if the extension is not found.
+
+ Sample usage:
+
+ class MyTest(TestCase):
+ def _do_some_test(self, func_impl):
+ self.assertEqual('foo', func_impl())
+
+ test_foo = functest_builder(_do_some_test, foo_py)
+ test_foo_extension = ext_functest_builder(_do_some_test, _foo_c)
+
+ Args:
+ method: The method to run. It must take two parameters, self and the
+ function implementation to test.
+ func: The function implementation to pass to method.
+ """
+
+ def do_test(self):
+ # A C extension function is a BuiltinFunctionType; anything else
+ # means the extension wasn't built/loaded, so skip rather than fail.
+ if not isinstance(func, types.BuiltinFunctionType):
+ raise SkipTest("%s extension not found" % func)
+ method(self, func)
+
+ return do_test
+
+
+def build_pack(f, objects_spec, store=None):
+ """Write test pack data from a concise spec.
+
+ Args:
+ f: A file-like object to write the pack to.
+ objects_spec: A list of (type_num, obj). For non-delta types, obj
+ is the string of that object's data.
+ For delta types, obj is a tuple of (base, data), where:
+
+ * base can be either an index in objects_spec of the base for that
+ delta; or for a ref delta, a SHA, in which case the resulting pack
+ will be thin and the base will be an external ref.
+ * data is a string of the full, non-deltified data for that object.
+
+ Note that offsets/refs and deltas are computed within this function.
+ store: An optional ObjectStore for looking up external refs.
+ Returns: A list of tuples in the order specified by objects_spec:
+ (offset, type num, data, sha, CRC32)
+ """
+ sf = SHA1Writer(f)
+ num_objects = len(objects_spec)
+ write_pack_header(sf.write, num_objects)
+
+ full_objects = {}
+ offsets = {}
+ crc32s = {}
+
+ # First pass(es): resolve every object's full (type, data, sha).
+ # Repeated sweeps let a delta reference a base that appears later in
+ # the spec; each sweep resolves deltas whose base is now known.
+ while len(full_objects) < num_objects:
+ for i, (type_num, data) in enumerate(objects_spec):
+ if type_num not in DELTA_TYPES:
+ full_objects[i] = (type_num, data, obj_sha(type_num, [data]))
+ continue
+ base, data = data
+ if isinstance(base, int):
+ if base not in full_objects:
+ # Base not resolved yet; retry on a later sweep.
+ continue
+ base_type_num, _, _ = full_objects[base]
+ else:
+ # External ref: look the base up in the provided store.
+ base_type_num, _ = store.get_raw(base)
+ full_objects[i] = (
+ base_type_num,
+ data,
+ obj_sha(base_type_num, [data]),
+ )
+
+ # Second pass: actually write each object, deltifying as requested.
+ for i, (type_num, obj) in enumerate(objects_spec):
+ offset = f.tell()
+ if type_num == OFS_DELTA:
+ base_index, data = obj
+ # Offset deltas encode the distance back to the base object.
+ base = offset - offsets[base_index]
+ _, base_data, _ = full_objects[base_index]
+ obj = (base, list(create_delta(base_data, data)))
+ elif type_num == REF_DELTA:
+ base_ref, data = obj
+ if isinstance(base_ref, int):
+ _, base_data, base = full_objects[base_ref]
+ else:
+ base_type_num, base_data = store.get_raw(base_ref)
+ base = obj_sha(base_type_num, base_data)
+ obj = (base, list(create_delta(base_data, data)))
+
+ crc32 = write_pack_object(sf.write, type_num, obj)
+ offsets[i] = offset
+ crc32s[i] = crc32
+
+ expected = []
+ for i in range(num_objects):
+ type_num, data, sha = full_objects[i]
+ assert len(sha) == 20
+ expected.append((offsets[i], type_num, data, sha, crc32s[i]))
+
+ # Append the pack trailer checksum and rewind so callers can read the
+ # pack back from the beginning.
+ sf.write_sha()
+ f.seek(0)
+ return expected
+
+
+def build_commit_graph(object_store, commit_spec, trees=None, attrs=None):
+ """Build a commit graph from a concise specification.
+
+ Sample usage:
+ >>> c1, c2, c3 = build_commit_graph(store, [[1], [2, 1], [3, 1, 2]])
+ >>> store[store[c3].parents[0]] == c1
+ True
+ >>> store[store[c3].parents[1]] == c2
+ True
+
+ If not otherwise specified, commits will refer to the empty tree and have
+ commit times increasing in the same order as the commit spec.
+
+ Args:
+ object_store: An ObjectStore to commit objects to.
+ commit_spec: An iterable of iterables of ints defining the commit
+ graph. Each entry defines one commit, and entries must be in
+ topological order. The first element of each entry is a commit number,
+ and the remaining elements are its parents. The commit numbers are only
+ meaningful for the call to make_commits; since real commit objects are
+ created, they will get created with real, opaque SHAs.
+ trees: An optional dict of commit number -> tree spec for building
+ trees for commits. The tree spec is an iterable of (path, blob, mode)
+ or (path, blob) entries; if mode is omitted, it defaults to the normal
+ file mode (0100644).
+ attrs: A dict of commit number -> (dict of attribute -> value) for
+ assigning additional values to the commits.
+ Returns: The list of commit objects created.
+
+ Raises:
+ ValueError: If an undefined commit identifier is listed as a parent.
+ """
+ if trees is None:
+ trees = {}
+ if attrs is None:
+ attrs = {}
+ commit_time = 0
+ # nums maps spec commit numbers to the real SHAs of created commits.
+ nums = {}
+ commits = []
+
+ for commit in commit_spec:
+ commit_num = commit[0]
+ try:
+ parent_ids = [nums[pn] for pn in commit[1:]]
+ except KeyError as exc:
+ # A parent number that was never defined earlier in the spec.
+ (missing_parent,) = exc.args
+ raise ValueError("Unknown parent %i" % missing_parent) from exc
+
+ blobs = []
+ for entry in trees.get(commit_num, []):
+ if len(entry) == 2:
+ # Mode omitted: default to a plain file.
+ path, blob = entry
+ entry = (path, blob, F)
+ path, blob, mode = entry
+ blobs.append((path, blob.id, mode))
+ object_store.add_object(blob)
+ tree_id = commit_tree(object_store, blobs)
+
+ commit_attrs = {
+ "message": ("Commit %i" % commit_num).encode("ascii"),
+ "parents": parent_ids,
+ "tree": tree_id,
+ "commit_time": commit_time,
+ }
+ commit_attrs.update(attrs.get(commit_num, {}))
+ commit_obj = make_commit(**commit_attrs)
+
+ # By default, increment the time by a lot. Out-of-order commits should
+ # be closer together than this because their main cause is clock skew.
+ commit_time = commit_attrs["commit_time"] + 100
+ nums[commit_num] = commit_obj.id
+ object_store.add_object(commit_obj)
+ commits.append(commit_obj)
+
+ return commits
+
+
+def setup_warning_catcher():
+ """Wrap warnings.showwarning with code that records warnings.
+
+ Returns: a tuple (caught_warnings, restore_showwarning), where
+ caught_warnings is a list that accumulates each warning as it is
+ issued, and restore_showwarning reinstates the original
+ warnings.showwarning hook.
+ """
+ caught_warnings = []
+ original_showwarning = warnings.showwarning
+
+ def custom_showwarning(*args, **kwargs):
+ # args[0] is the warning object/message passed to showwarning.
+ caught_warnings.append(args[0])
+
+ warnings.showwarning = custom_showwarning
+
+ def restore_showwarning():
+ warnings.showwarning = original_showwarning
+
+ return caught_warnings, restore_showwarning