commit - 93095ab3575a767a59c4ff7b2f9bf0fcac08335b
commit + 8e548f72a416a6300bcf0cfd5d46284ab39c577d
blob - 659665c21e87afa5cbeafbbb74e328b0d8f16608
blob + 51594a39fc281388e601a07daaa5a7b36fbd5820
--- .github/workflows/pythonpackage.yml
+++ .github/workflows/pythonpackage.yml
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
- python-version: [3.6, 3.7, 3.8, 3.9, 3.10-dev, pypy3]
+ python-version: ["3.6", "3.7", "3.8", "3.9", "3.10", pypy3]
exclude:
# sqlite3 exit handling seems to get in the way
- os: macos-latest
- name: Install dependencies
run: |
python -m pip install --upgrade pip
- pip install -U pip coverage codecov flake8 fastimport
+ pip install -U pip coverage codecov flake8 fastimport paramiko
- name: Install gpg on supported platforms
run: pip install -U gpg
if: "matrix.os != 'windows-latest' && matrix.python-version != 'pypy3'"
blob - faa4e95a3189e74e3468469d798ea3af5ff936ad (mode 644)
blob + /dev/null
--- .github/workflows/pythonpublish.yml
+++ /dev/null
-name: Upload Python Package
-
-on:
- push:
- tags:
- - dulwich-*
-
-jobs:
- deploy:
-
- runs-on: ${{ matrix.os }}
- strategy:
- matrix:
- os: [macos-latest, windows-latest]
- python-version: ['3.6', '3.7', '3.8', '3.9', '3.10']
- include:
- - os: ubuntu-latest
- python-version: '3.x'
- # path encoding
- fail-fast: false
-
- steps:
- - uses: actions/checkout@v2
- - name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v2
- with:
- python-version: ${{ matrix.python-version }}
- - name: Install native dependencies (Ubuntu)
- run: sudo apt-get update && sudo apt-get install -y libgpgme-dev libgpg-error-dev
- if: "matrix.os == 'ubuntu-latest'"
- - name: Install native dependencies (MacOS)
- run: brew install swig gpgme
- if: "matrix.os == 'macos-latest'"
- - name: Install dependencies
- run: |
- python -m pip install --upgrade pip
- pip install setuptools wheel twine fastimport
- - name: Install gpg on supported platforms
- run: pip install -U gpg
- if: "matrix.os != 'windows-latest' && matrix.python-version != 'pypy3'"
- - name: Run test suite
- run: |
- python -m unittest dulwich.tests.test_suite
- - name: Build
- run: |
- python setup.py sdist bdist_wheel
- if: "matrix.os != 'ubuntu-latest'"
- - uses: docker/setup-qemu-action@v1
- name: Set up QEMU
- if: "matrix.os == 'ubuntu-latest'"
- - name: Build and publish (Linux aarch64)
- uses: RalfG/python-wheels-manylinux-build@v0.3.3-manylinux2014_aarch64
- with:
- python-versions: 'cp36-cp36m cp37-cp37m cp38-cp38 cp39-cp39 cp310-cp310'
- if: "matrix.os == 'ubuntu-latest'"
- - name: Build and publish (Linux)
- uses: RalfG/python-wheels-manylinux-build@v0.3.1
- with:
- python-versions: 'cp36-cp36m cp37-cp37m cp38-cp38 cp39-cp39 cp310-cp310'
- env:
- # Temporary fix for LD_LIBRARY_PATH issue. See
- # https://github.com/RalfG/python-wheels-manylinux-build/issues/26
- LD_LIBRARY_PATH: /usr/local/lib:${{ env.LD_LIBRARY_PATH }}
- if: "matrix.os == 'ubuntu-latest'"
- - name: Publish (Linux)
- env:
- TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
- TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
- run: |
- # Only include *manylinux* wheels; the other wheels files are built but
- # rejected by pip.
- twine upload dist/*manylinux*.whl
- if: "matrix.os == 'ubuntu-latest'"
- - name: Publish
- env:
- TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
- TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
- run: |
- twine upload dist/*.whl
- if: "matrix.os != 'ubuntu-latest'"
blob - /dev/null
blob + bc3dba462a887afe6041cfd1bf98a57675cc749b (mode 644)
--- /dev/null
+++ .github/workflows/pythonwheels.yml
+name: Build Python Wheels
+
+on:
+ push:
+ pull_request:
+ schedule:
+ - cron: '0 6 * * *' # Daily 6AM UTC build
+
+jobs:
+ build:
+
+ runs-on: ${{ matrix.os }}
+ strategy:
+ matrix:
+ os: [macos-latest, windows-latest]
+ python-version: ['3.6', '3.7', '3.8', '3.9', '3.10']
+ include:
+ - os: ubuntu-latest
+ python-version: '3.x'
+ # path encoding
+ fail-fast: true
+
+ steps:
+ - uses: actions/checkout@v2
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Install native dependencies (Ubuntu)
+ run: sudo apt-get update && sudo apt-get install -y libgpgme-dev libgpg-error-dev
+ if: "matrix.os == 'ubuntu-latest'"
+ - name: Install native dependencies (MacOS)
+ run: brew install swig gpgme
+ if: "matrix.os == 'macos-latest'"
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install setuptools wheel fastimport paramiko urllib3
+ - name: Install gpg on supported platforms
+ run: pip install -U gpg
+ if: "matrix.os != 'windows-latest' && matrix.python-version != 'pypy3'"
+ - name: Run test suite
+ run: |
+ python -m unittest dulwich.tests.test_suite
+ - name: Build
+ run: |
+ python setup.py sdist bdist_wheel
+ if: "matrix.os != 'ubuntu-latest'"
+ - uses: docker/setup-qemu-action@v1
+ name: Set up QEMU
+ if: "matrix.os == 'ubuntu-latest'"
+ - name: Build (Linux aarch64)
+ uses: RalfG/python-wheels-manylinux-build@v0.3.3-manylinux2014_aarch64
+ with:
+ python-versions: 'cp36-cp36m cp37-cp37m cp38-cp38 cp39-cp39 cp310-cp310'
+ if: "matrix.os == 'ubuntu-latest'"
+ - name: Build (Linux)
+ uses: RalfG/python-wheels-manylinux-build@v0.3.1
+ with:
+ python-versions: 'cp36-cp36m cp37-cp37m cp38-cp38 cp39-cp39 cp310-cp310'
+ env:
+ # Temporary fix for LD_LIBRARY_PATH issue. See
+ # https://github.com/RalfG/python-wheels-manylinux-build/issues/26
+ LD_LIBRARY_PATH: /usr/local/lib:${{ env.LD_LIBRARY_PATH }}
+ if: "matrix.os == 'ubuntu-latest'"
+ - name: Upload wheels (Linux)
+ uses: actions/upload-artifact@v2
+ # Only include *manylinux* wheels; the other wheels files are built but
+ # rejected by pip.
+ if: "matrix.os == 'ubuntu-latest'"
+ with:
+ name: dist
+ path: dist/*manylinux*.whl
+ - name: Upload wheels (non-Linux)
+ uses: actions/upload-artifact@v2
+ with:
+ name: dist
+ path: dist/*.whl
+ if: "matrix.os != 'ubuntu-latest'"
+
+ publish:
+ runs-on: ubuntu-latest
+
+ needs: build
+ if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/dulwich-')
+ steps:
+ - name: Set up Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: "3.x"
+ - name: Install twine
+ run: |
+ python -m pip install --upgrade pip
+ pip install twine
+ - name: Download wheels
+ uses: actions/download-artifact@v2
+ - name: Publish wheels
+ env:
+ TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
+ TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
+ run: twine upload dist/*.whl
blob - e6809e5671c9611848dbedc5b12f75108647fd8a
blob + 63d3dfa8fc48ef4dfbc5f4499d7d302a6d99bd24
--- Makefile
+++ Makefile
check-noextensions:: clean
$(RUNTEST) dulwich.tests.test_suite
+check-contrib:: clean
+ $(RUNTEST) -v dulwich.contrib.test_suite
+
check-all: check check-pypy check-noextensions
typing:
blob - f89546cd103dce449639447d94e4e8c6650d2b53
blob + dd0c012a95bc90d5aa584d2a1ea1437cfa5d3329
--- NEWS
+++ NEWS
+0.20.42 2022-05-24
+
+ * Drop ``RefsContainer.watch`` that was always flaky.
+ (Jelmer Vernooij, #886)
+
+0.20.41 2022-05-24
+
+ * Fix wheel uploading, properly. (Ruslan Kuprieiev)
+
+0.20.40 2022-05-19
+
+ * Fix wheel uploading. (Daniele Trifirò, Jelmer Vernooij)
+
+0.20.39 2022-05-19
+
+0.20.38 2022-05-17
+
+ * Disable paramiko tests if paramiko is not available. (Michał Górny)
+
+ * Set flag to re-enable paramiko server side on gentoo for running paramiko
+ tests. (Michał Górny)
+
+ * Increase tolerance when comparing time stamps; fixes some
+ spurious test failures on slow CI systems. (Jelmer Vernooij)
+
+ * Revert removal of caching of full HTTP response. This breaks
+ access to some HTTP servers.
+ (Jelmer Vernooij)
+
+0.20.37 2022-05-16
+
+ * Avoid making an extra copy when fetching pack files.
+ (Jelmer Vernooij)
+
+ * Add ``porcelain.remote_remove``.
+ (Jelmer Vernooij, #923)
+
+0.20.36 2022-05-15
+
+ * Add ``walk_untracked`` argument to ``porcelain.status``.
+ (Daniele Trifirò)
+
+ * Add tests for paramiko SSH Vendor.
+ (Filipp Frizzy)
+
0.20.35 2022-03-20
* Document the ``path`` attribute for ``Repo``.
blob - 898d25899cc09937c040d2f8c9a1e9fa914bee87
blob + 05c4d30ee39bfb34a2b0d627d8c71c5e1575b545
--- PKG-INFO
+++ PKG-INFO
Metadata-Version: 2.1
Name: dulwich
-Version: 0.20.35
+Version: 0.20.42
Summary: Python Git Library
Home-page: https://www.dulwich.io/
Author: Jelmer Vernooij
Requires-Python: >=3.6
Provides-Extra: fastimport
Provides-Extra: https
+Provides-Extra: paramiko
Provides-Extra: pgp
-Provides-Extra: watch
License-File: COPYING
License-File: AUTHORS
----
There is a *#dulwich* IRC channel on the `OFTC <https://www.oftc.net/>`_, and
-`dulwich-announce <https://groups.google.com/forum/#!forum/dulwich-announce>`_
-and `dulwich-discuss <https://groups.google.com/forum/#!forum/dulwich-discuss>`_
-mailing lists.
+a `dulwich-discuss <https://groups.google.com/forum/#!forum/dulwich-discuss>`_
+mailing list.
Contributing
------------
blob - 8f4f0bab3b99458cfe1f6cfd991be82ceb5c38c7
blob + 3eb080e6587660edbbe2df6083f3c9f13e67bfaa
--- README.rst
+++ README.rst
----
There is a *#dulwich* IRC channel on the `OFTC <https://www.oftc.net/>`_, and
-`dulwich-announce <https://groups.google.com/forum/#!forum/dulwich-announce>`_
-and `dulwich-discuss <https://groups.google.com/forum/#!forum/dulwich-discuss>`_
-mailing lists.
+a `dulwich-discuss <https://groups.google.com/forum/#!forum/dulwich-discuss>`_
+mailing list.
Contributing
------------
blob - 0280087fb5605eb2c6788bcbc757e3d6e6ca0616
blob + 1f523433c5f80ce3a5db12ff70721e7db5f40787
--- dulwich/__init__.py
+++ dulwich/__init__.py
"""Python implementation of the Git file formats and protocols."""
-__version__ = (0, 20, 35)
+__version__ = (0, 20, 42)
blob - 5f684ff047585e234af4dda7a315530501a81b6d
blob + 9120e51f673806fb2e2463068b7762ae3c89840e
--- dulwich/client.py
+++ dulwich/client.py
import socket
import subprocess
import sys
-from typing import Optional, Dict, Callable, Set
+from typing import Any, Callable, Dict, List, Optional, Set, Tuple, IO
from urllib.parse import (
quote as urlquote,
urlunsplit,
urlunparse,
)
+from urllib.request import url2pathname
+
import dulwich
from dulwich.config import get_xdg_config_home_path
from dulwich.errors import (
ZERO_SHA,
extract_capabilities,
parse_capability,
+ pkt_line,
)
from dulwich.pack import (
- write_pack_data,
write_pack_objects,
+ PackChunkGenerator,
)
from dulwich.refs import (
read_info_refs,
ANNOTATED_TAG_SUFFIX,
_import_remote_refs,
)
+from dulwich.repo import Repo
logger = logging.getLogger(__name__)
self._ref_statuses.append(ref_status)
-def read_pkt_refs(proto):
+def read_pkt_refs(pkt_seq):
server_capabilities = None
refs = {}
# Receive refs from server
- for pkt in proto.read_pkt_seq():
+ for pkt in pkt_seq:
(sha, ref) = pkt.rstrip(b"\n").split(None, 1)
if sha == b"ERR":
raise GitProtocolError(ref.decode("utf-8", "replace"))
return "%s(%r, %r)" % (self.__class__.__name__, self.refs, self.agent)
-def _read_shallow_updates(proto):
+def _read_shallow_updates(pkt_seq):
new_shallow = set()
new_unshallow = set()
- for pkt in proto.read_pkt_seq():
+ for pkt in pkt_seq:
cmd, sha = pkt.split(b" ", 1)
if cmd == COMMAND_SHALLOW:
new_shallow.add(sha.strip())
else:
raise GitProtocolError("unknown command %s" % pkt)
return (new_shallow, new_unshallow)
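Both read_pkt_refs and _read_shallow_updates now take a generic packet sequence rather than a Protocol object, so the same parsers can be fed from proto.read_pkt_seq() or from an HTTP response. A minimal sketch with placeholder SHAs:

    from dulwich.client import _read_shallow_updates

    new_shallow, new_unshallow = _read_shallow_updates([
        b"shallow " + b"a" * 40,
        b"unshallow " + b"b" * 40,
    ])
    assert new_shallow == {b"a" * 40}
    assert new_unshallow == {b"b" * 40}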
+
+
+class _v1ReceivePackHeader(object):
+
+ def __init__(self, capabilities, old_refs, new_refs):
+ self.want = []
+ self.have = []
+ self._it = self._handle_receive_pack_head(capabilities, old_refs, new_refs)
+ self.sent_capabilities = False
+
+ def __iter__(self):
+ return self._it
+
+ def _handle_receive_pack_head(self, capabilities, old_refs, new_refs):
+ """Handle the head of a 'git-receive-pack' request.
+
+ Args:
+ capabilities: List of negotiated capabilities
+ old_refs: Old refs, as received from the server
+ new_refs: Refs to change
+
+ Yields:
+ pkt-line payloads to send, terminated by a None flush marker
+ """
+ self.have = [x for x in old_refs.values() if not x == ZERO_SHA]
+
+ for refname in new_refs:
+ if not isinstance(refname, bytes):
+ raise TypeError("refname is not a bytestring: %r" % refname)
+ old_sha1 = old_refs.get(refname, ZERO_SHA)
+ if not isinstance(old_sha1, bytes):
+ raise TypeError(
+ "old sha1 for %s is not a bytestring: %r" % (refname, old_sha1)
+ )
+ new_sha1 = new_refs.get(refname, ZERO_SHA)
+ if not isinstance(new_sha1, bytes):
+ raise TypeError(
+ "old sha1 for %s is not a bytestring %r" % (refname, new_sha1)
+ )
+
+ if old_sha1 != new_sha1:
+ logger.debug(
+ 'Sending updated ref %r: %r -> %r',
+ refname, old_sha1, new_sha1)
+ if self.sent_capabilities:
+ yield old_sha1 + b" " + new_sha1 + b" " + refname
+ else:
+ yield (
+ old_sha1
+ + b" "
+ + new_sha1
+ + b" "
+ + refname
+ + b"\0"
+ + b" ".join(sorted(capabilities))
+ )
+ self.sent_capabilities = True
+ if new_sha1 not in self.have and new_sha1 != ZERO_SHA:
+ self.want.append(new_sha1)
+ yield None
+
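The receive-pack header is now a reusable iterator shared by the ssh/git and HTTP code paths: iterating yields pkt-line payloads and ends with None as the flush marker, after which have and want are populated. A usage sketch with placeholder SHAs:

    header = _v1ReceivePackHeader(
        [b"report-status"],
        old_refs={b"refs/heads/master": b"a" * 40},
        new_refs={b"refs/heads/master": b"b" * 40},
    )
    pkts = list(header)
    assert pkts[-1] is None           # flush-pkt sentinel
    assert header.want == [b"b" * 40]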
+
+def _read_side_band64k_data(pkt_seq, channel_callbacks):
+ """Read per-channel data.
+ This requires the side-band-64k capability.
+ Args:
+ pkt_seq: Sequence of packets to read
+ channel_callbacks: Dictionary mapping channels to packet
+ handlers to use. None for a callback discards channel data.
+ """
+ for pkt in pkt_seq:
+ channel = ord(pkt[:1])
+ pkt = pkt[1:]
+ try:
+ cb = channel_callbacks[channel]
+ except KeyError:
+ raise AssertionError("Invalid sideband channel %d" % channel)
+ else:
+ if cb is not None:
+ cb(pkt)
+
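Promoted to a module-level helper taking a packet sequence, the sideband demultiplexer can now be exercised directly; the first byte of each packet selects the channel:

    data, progress = [], []
    _read_side_band64k_data(
        [b"\x01PACK...", b"\x02remote: counting objects\n"],
        {1: data.append, 2: progress.append},
    )
    assert data == [b"PACK..."]
    assert progress == [b"remote: counting objects\n"]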
+
# TODO(durin42): this doesn't correctly degrade if the server doesn't
# support some capabilities. This should work properly with servers
# that don't support multi_ack.
checkout=None, branch=None, progress=None, depth=None):
"""Clone a repository."""
from .refs import _set_origin_head, _set_default_branch, _set_head
- from .repo import Repo
if mkdir:
os.mkdir(target_path)
else:
encoded_path = self.get_url(path).encode('utf-8')
+ assert target is not None
target_config = target.get_config()
target_config.set((b"remote", origin.encode('utf-8')), b"url", encoded_path)
target_config.set(
raise
return target
- def fetch(self, path, target, determine_wants=None, progress=None, depth=None):
+ def fetch(
+ self,
+ path: str,
+ target: Repo,
+ determine_wants: Optional[
+ Callable[[Dict[bytes, bytes], Optional[int]], List[bytes]]
+ ] = None,
+ progress: Optional[Callable[[bytes], None]] = None,
+ depth: Optional[int] = None
+ ) -> FetchPackResult:
"""Fetch into a target repository.
Args:
if CAPABILITY_THIN_PACK in self._fetch_capabilities:
# TODO(jelmer): Avoid reading entire file into memory and
# only processing it after the whole file has been fetched.
- f = BytesIO()
+ from tempfile import SpooledTemporaryFile
+ f = SpooledTemporaryFile() # type: IO[bytes]
def commit():
if f.tell():
f.seek(0)
target.object_store.add_thin_pack(f.read, None)
+ f.close()
def abort():
- pass
+ f.close()
else:
f, commit, abort = target.object_store.add_pack()
"""
raise NotImplementedError(self.get_refs)
- def _read_side_band64k_data(self, proto, channel_callbacks):
- """Read per-channel data.
-
- This requires the side-band-64k capability.
-
- Args:
- proto: Protocol object to read from
- channel_callbacks: Dictionary mapping channels to packet
- handlers to use. None for a callback discards channel data.
- """
- for pkt in proto.read_pkt_seq():
- channel = ord(pkt[:1])
- pkt = pkt[1:]
- try:
- cb = channel_callbacks[channel]
- except KeyError:
- raise AssertionError("Invalid sideband channel %d" % channel)
- else:
- if cb is not None:
- cb(pkt)
-
@staticmethod
def _should_send_pack(new_refs):
# The packfile MUST NOT be sent if the only command used is delete.
return any(sha != ZERO_SHA for sha in new_refs.values())
-
- def _handle_receive_pack_head(self, proto, capabilities, old_refs, new_refs):
- """Handle the head of a 'git-receive-pack' request.
-
- Args:
- proto: Protocol object to read from
- capabilities: List of negotiated capabilities
- old_refs: Old refs, as received from the server
- new_refs: Refs to change
-
- Returns:
- (have, want) tuple
- """
- want = []
- have = [x for x in old_refs.values() if not x == ZERO_SHA]
- sent_capabilities = False
-
- for refname in new_refs:
- if not isinstance(refname, bytes):
- raise TypeError("refname is not a bytestring: %r" % refname)
- old_sha1 = old_refs.get(refname, ZERO_SHA)
- if not isinstance(old_sha1, bytes):
- raise TypeError(
- "old sha1 for %s is not a bytestring: %r" % (refname, old_sha1)
- )
- new_sha1 = new_refs.get(refname, ZERO_SHA)
- if not isinstance(new_sha1, bytes):
- raise TypeError(
- "old sha1 for %s is not a bytestring %r" % (refname, new_sha1)
- )
-
- if old_sha1 != new_sha1:
- logger.debug(
- 'Sending updated ref %r: %r -> %r',
- refname, old_sha1, new_sha1)
- if sent_capabilities:
- proto.write_pkt_line(old_sha1 + b" " + new_sha1 + b" " + refname)
- else:
- proto.write_pkt_line(
- old_sha1
- + b" "
- + new_sha1
- + b" "
- + refname
- + b"\0"
- + b" ".join(sorted(capabilities))
- )
- sent_capabilities = True
- if new_sha1 not in have and new_sha1 != ZERO_SHA:
- want.append(new_sha1)
- proto.write_pkt_line(None)
- return (have, want)
def _negotiate_receive_pack_capabilities(self, server_capabilities):
negotiated_capabilities = self._send_capabilities & server_capabilities
channel_callbacks[1] = PktLineParser(
self._report_status_parser.handle_packet
).parse
- self._read_side_band64k_data(proto, channel_callbacks)
+ _read_side_band64k_data(proto.read_pkt_seq(), channel_callbacks)
else:
if CAPABILITY_REPORT_STATUS in capabilities:
for pkt in proto.read_pkt_seq():
)
proto.write_pkt_line(None)
if can_read is not None:
- (new_shallow, new_unshallow) = _read_shallow_updates(proto)
+ (new_shallow, new_unshallow) = _read_shallow_updates(proto.read_pkt_seq())
else:
new_shallow = new_unshallow = None
else:
def progress(x):
pass
- self._read_side_band64k_data(
- proto,
+ _read_side_band64k_data(
+ proto.read_pkt_seq(),
{
SIDE_BAND_CHANNEL_DATA: pack_data,
SIDE_BAND_CHANNEL_PROGRESS: progress,
proto, unused_can_read, stderr = self._connect(b"receive-pack", path)
with proto:
try:
- old_refs, server_capabilities = read_pkt_refs(proto)
+ old_refs, server_capabilities = read_pkt_refs(proto.read_pkt_seq())
except HangupException:
raise _remote_error_from_stderr(stderr)
(
ref_status = None
return SendPackResult(old_refs, agent=agent, ref_status=ref_status)
- (have, want) = self._handle_receive_pack_head(
- proto, negotiated_capabilities, old_refs, new_refs
- )
+ header_handler = _v1ReceivePackHeader(negotiated_capabilities, old_refs, new_refs)
+ for pkt in header_handler:
+ proto.write_pkt_line(pkt)
+
pack_data_count, pack_data = generate_pack_data(
- have,
- want,
+ header_handler.have,
+ header_handler.want,
ofs_delta=(CAPABILITY_OFS_DELTA in negotiated_capabilities),
)
if self._should_send_pack(new_refs):
- write_pack_data(proto.write_file(), pack_data_count, pack_data)
+ for chunk in PackChunkGenerator(pack_data_count, pack_data):
+ proto.write(chunk)
ref_status = self._handle_receive_pack_tail(
proto, negotiated_capabilities, progress
proto, can_read, stderr = self._connect(b"upload-pack", path)
with proto:
try:
- refs, server_capabilities = read_pkt_refs(proto)
+ refs, server_capabilities = read_pkt_refs(proto.read_pkt_seq())
except HangupException:
raise _remote_error_from_stderr(stderr)
(
proto, _, stderr = self._connect(b"upload-pack", path)
with proto:
try:
- refs, _ = read_pkt_refs(proto)
+ refs, _ = read_pkt_refs(proto.read_pkt_seq())
except HangupException:
raise _remote_error_from_stderr(stderr)
proto.write_pkt_line(None)
ret = proto.read_pkt_line()
if ret is not None:
raise AssertionError("expected pkt tail")
- self._read_side_band64k_data(
- proto,
+ _read_side_band64k_data(
+ proto.read_pkt_seq(),
{
SIDE_BAND_CHANNEL_DATA: write_data,
SIDE_BAND_CHANNEL_PROGRESS: progress,
self.proc.wait()
-def find_git_command():
+def find_git_command() -> List[str]:
"""Find command to run for system Git (usually C Git)."""
if sys.platform == "win32": # support .exe, .bat and .cmd
try: # to avoid overhead
@classmethod
def _open_repo(cls, path):
- from dulwich.repo import Repo
if not isinstance(path, str):
path = os.fsdecode(path)
raise GitProtocolError(
"unexpected first line %r from smart server" % pkt
)
- return read_pkt_refs(proto) + (base_url,)
+ return read_pkt_refs(proto.read_pkt_seq()) + (base_url,)
else:
return read_info_refs(resp), set(), base_url
finally:
headers = {
"Content-Type": "application/x-%s-request" % service,
"Accept": result_content_type,
- "Content-Length": str(len(data)),
}
+ if isinstance(data, bytes):
+ headers["Content-Length"] = str(len(data))
resp, read = self._http_request(url, headers, data)
if resp.content_type != result_content_type:
raise GitProtocolError(
return SendPackResult(new_refs, agent=agent, ref_status={})
if self.dumb:
raise NotImplementedError(self.fetch_pack)
- req_data = BytesIO()
- req_proto = Protocol(None, req_data.write)
- (have, want) = self._handle_receive_pack_head(
- req_proto, negotiated_capabilities, old_refs, new_refs
- )
- pack_data_count, pack_data = generate_pack_data(
- have,
- want,
- ofs_delta=(CAPABILITY_OFS_DELTA in negotiated_capabilities),
- )
- if self._should_send_pack(new_refs):
- write_pack_data(req_proto.write_file(), pack_data_count, pack_data)
+
+ def body_generator():
+ header_handler = _v1ReceivePackHeader(negotiated_capabilities, old_refs, new_refs)
+ for pkt in header_handler:
+ yield pkt_line(pkt)
+ pack_data_count, pack_data = generate_pack_data(
+ header_handler.have,
+ header_handler.want,
+ ofs_delta=(CAPABILITY_OFS_DELTA in negotiated_capabilities),
+ )
+ if self._should_send_pack(new_refs):
+ yield from PackChunkGenerator(pack_data_count, pack_data)
+
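Because data can now be a generator (body_generator above), Content-Length is only set for bytes bodies; with an iterable body and no declared length, http.client switches to chunked transfer encoding, so the pack is streamed rather than buffered in memory. A minimal sketch, endpoint hypothetical:

    import urllib3

    def body():
        yield b"0000"  # a single flush-pkt, for illustration

    pool = urllib3.PoolManager()
    resp = pool.request(
        "POST", "https://git.example.com/repo.git/git-receive-pack",
        body=body(), preload_content=False,
    )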
resp, read = self._smart_request(
- "git-receive-pack", url, data=req_data.getvalue()
+ "git-receive-pack", url, data=body_generator()
)
try:
resp_proto = Protocol(read, None)
try:
resp_proto = Protocol(read, None)
if new_shallow is None and new_unshallow is None:
- (new_shallow, new_unshallow) = _read_shallow_updates(resp_proto)
+ (new_shallow, new_unshallow) = _read_shallow_updates(resp_proto.read_pkt_seq())
self._handle_upload_pack_tail(
resp_proto,
negotiated_capabilities,
req_headers["Accept-Encoding"] = "identity"
if data is None:
- resp = self.pool_manager.request("GET", url, headers=req_headers)
+ resp = self.pool_manager.request(
+ "GET", url, headers=req_headers, preload_content=False)
else:
resp = self.pool_manager.request(
- "POST", url, headers=req_headers, body=data
+ "POST", url, headers=req_headers, body=data, preload_content=False
)
if resp.status == 404:
"unexpected http resp %d for %s" % (resp.status, url)
)
- # TODO: Optimization available by adding `preload_content=False` to the
- # request and just passing the `read` method on instead of going via
- # `BytesIO`, if we can guarantee that the entire response is consumed
- # before issuing the next to still allow for connection reuse from the
- # pool.
- read = BytesIO(resp.data).read
-
resp.content_type = resp.getheader("Content-Type")
# Check if geturl() is available (urllib3 version >= 1.23)
try:
resp.redirect_location = resp.get_redirect_location()
else:
resp.redirect_location = resp_url if resp_url != url else ""
- return resp, read
+ # TODO(jelmer): Remove BytesIO() call that caches entire response in
+ # memory. See https://github.com/jelmer/dulwich/issues/966
+ return resp, BytesIO(resp.data).read
HttpGitClient = Urllib3HttpGitClient
+def _win32_url_to_path(parsed) -> str:
+ """
+ Convert a file: URL to a path.
+
+ https://datatracker.ietf.org/doc/html/rfc8089
+ """
+ assert sys.platform == "win32" or os.name == "nt"
+ assert parsed.scheme == "file"
+
+ _, netloc, path, _, _, _ = parsed
+
+ if netloc == "localhost" or not netloc:
+ netloc = ""
+ elif (
+ netloc
+ and len(netloc) >= 2
+ and netloc[0].isalpha()
+ and netloc[1:2] in (":", ":/")
+ ):
+ # file://C:/foo.bar/baz or file://C://foo.bar//baz
+ netloc = netloc[:2]
+ else:
+ raise NotImplementedError("Non-local file URLs are not supported")
+
+ return url2pathname(netloc + path)
+
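A sketch of the mapping this helper implements (Windows-only: it asserts the platform, where url2pathname resolves to nturl2path.url2pathname):

    from urllib.parse import urlparse
    from dulwich.client import _win32_url_to_path

    for url in ("file:///C:/foo.bar/baz", "file://C:/foo.bar/baz"):
        print(_win32_url_to_path(urlparse(url)))  # C:\foo.bar\baz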
+
def get_transport_and_path_from_url(url, config=None, **kwargs):
"""Obtain a git client from a URL.
parsed.path,
)
elif parsed.scheme == "file":
+ if sys.platform == "win32" or os.name == "nt":
+ return default_local_git_client_cls(**kwargs), _win32_url_to_path(parsed)
return (
default_local_git_client_cls.from_parsedurl(parsed, **kwargs),
parsed.path,
return (user, host, path)
-def get_transport_and_path(location, **kwargs):
+def get_transport_and_path(
+ location: str,
+ **kwargs: Any
+) -> Tuple[GitClient, str]:
"""Obtain a git client from a URL.
Args:
blob - 7a91806fad30162070ece5f4ab3913cc08eec8dc
blob + 2eab7d9cdfb1af20c1aad078a524b0de85c70d56
--- dulwich/config.py
+++ dulwich/config.py
import sys
import warnings
-from typing import BinaryIO, Tuple, Optional
+from typing import BinaryIO, Iterator, KeysView, Optional, Tuple, Union
try:
from collections.abc import (
def __len__(self):
return len(self._keyed)
- def keys(self):
+ def keys(self) -> KeysView[Tuple[bytes, ...]]:
return self._keyed.keys()
def items(self):
"""
raise NotImplementedError(self.sections)
- def has_section(self, name):
+ def has_section(self, name: Tuple[bytes, ...]) -> bool:
"""Check if a specified section exists.
Args:
return self._values[(section[0],)].get_all(name)
- def get(self, section, name):
+ def get( # type: ignore[override]
+ self,
+ section: Union[bytes, str, Tuple[Union[bytes, str], ...]],
+ name: Union[str, bytes]
+ ) -> Optional[bytes]:
section, name = self._check_section_and_name(section, name)
if len(section) > 1:
return self.writable.set(section, name, value)
-def parse_submodules(config):
+def parse_submodules(config: ConfigFile) -> Iterator[Tuple[bytes, bytes, bytes]]:
"""Parse a gitmodules GitConfig file, returning submodules.
Args:
section_kind, section_name = section
if section_kind == b"submodule":
sm_path = config.get(section, b"path")
+ assert sm_path is not None
sm_url = config.get(section, b"url")
+ assert sm_url is not None
yield (sm_path, sm_url, section_name)
blob - 52bf7cae3ac7bac9622d3ce06655f8fc2a699b31
blob + cc23d2af193e49247691ff96501ea8380ebb7aff
--- dulwich/contrib/__init__.py
+++ dulwich/contrib/__init__.py
import unittest
names = [
+ "paramiko_vendor",
"release_robot",
"swift",
]
blob - ec32966bdefe476a26c9141f8f7dc024ab385296
blob + 33496543afbf40e7153e151911ee656ebfb30b8f
--- dulwich/contrib/test_paramiko_vendor.py
+++ dulwich/contrib/test_paramiko_vendor.py
"""Tests for paramiko_vendor."""
import socket
-import paramiko
import threading
from dulwich.tests import TestCase
-from dulwich.contrib.paramiko_vendor import ParamikoSSHVendor
+from io import StringIO
+from unittest import skipIf
+
try:
- from StringIO import StringIO
+ import paramiko
except ImportError:
- from io import StringIO
+ has_paramiko = False
+else:
+ has_paramiko = True
+ from dulwich.contrib.paramiko_vendor import ParamikoSSHVendor
+ class Server(paramiko.ServerInterface):
+ """http://docs.paramiko.org/en/2.4/api/server.html"""
+ def __init__(self, commands, *args, **kwargs):
+ super(Server, self).__init__(*args, **kwargs)
+ self.commands = commands
+ def check_channel_exec_request(self, channel, command):
+ self.commands.append(command)
+ return True
+
+ def check_auth_password(self, username, password):
+ if username == USER and password == PASSWORD:
+ return paramiko.AUTH_SUCCESSFUL
+ return paramiko.AUTH_FAILED
+
+ def check_auth_publickey(self, username, key):
+ pubkey = paramiko.RSAKey.from_private_key(StringIO(CLIENT_KEY))
+ if username == USER and key == pubkey:
+ return paramiko.AUTH_SUCCESSFUL
+ return paramiko.AUTH_FAILED
+
+ def check_channel_request(self, kind, chanid):
+ if kind == "session":
+ return paramiko.OPEN_SUCCEEDED
+ return paramiko.OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED
+
+ def get_allowed_auths(self, username):
+ return "password,publickey"
+
+
USER = 'testuser'
PASSWORD = 'test'
SERVER_KEY = """\
R6legDG2e/50ph7yc8gwAaA1kUXMiuLi8Nfkw/3yyvmJwklNegi4aRzRbA2Mzhi2
4q9WMQKBgQCb0JNyxHG4pvLWCF/j0Sm1FfvrpnqSv5678n1j4GX7Ka/TubOK1Y4K
U+Oib7dKa/zQMWehVFNTayrsq6bKVZ6q7zG+IHiRLw4wjeAxREFH6WUjDrn9vl2l
-D48DKbBuBwuVOJWyq3qbfgJXojscgNQklrsPdXVhDwOF0dYxP89HnA=="""
+D48DKbBuBwuVOJWyq3qbfgJXojscgNQklrsPdXVhDwOF0dYxP89HnA==
+-----END RSA PRIVATE KEY-----"""
CLIENT_KEY = """\
-----BEGIN RSA PRIVATE KEY-----
MIIEpAIBAAKCAQEAxvREKSElPOm/0z/nPO+j5rk2tjdgGcGc7We1QZ6TRXYLu7nN
-----END RSA PRIVATE KEY-----"""
-class Server(paramiko.ServerInterface):
- """http://docs.paramiko.org/en/2.4/api/server.html"""
- def __init__(self, commands, *args, **kwargs):
- super(Server, self).__init__(*args, **kwargs)
- self.commands = commands
+@skipIf(not has_paramiko, "paramiko is not installed")
+class ParamikoSSHVendorTests(TestCase):
- def check_channel_exec_request(self, channel, command):
- self.commands.append(command)
- return True
+ def setUp(self):
+ import paramiko.transport
- def check_auth_password(self, username, password):
- if username == USER and password == PASSWORD:
- return paramiko.AUTH_SUCCESSFUL
- return paramiko.AUTH_FAILED
+ # reenable server functionality for tests
+ if hasattr(paramiko.transport, "SERVER_DISABLED_BY_GENTOO"):
+ paramiko.transport.SERVER_DISABLED_BY_GENTOO = False
- def check_auth_publickey(self, username, key):
- pubkey = paramiko.RSAKey.from_private_key(StringIO(CLIENT_KEY))
- if username == USER and key == pubkey:
- return paramiko.AUTH_SUCCESSFUL
- return paramiko.AUTH_FAILED
-
- def check_channel_request(self, kind, chanid):
- if kind == "session":
- return paramiko.OPEN_SUCCEEDED
- return paramiko.OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED
-
- def get_allowed_auths(self, username):
- return "password,publickey"
-
-
-class ParamikoSSHVendorTests(TestCase):
- def setUp(self):
self.commands = []
socket.setdefaulttimeout(10)
self.addCleanup(socket.setdefaulttimeout, None)
self.thread.start()
def tearDown(self):
- pass
+ self.thread.join()
def _run(self):
try:
blob - 8c44edd64993f6bb5b9a61fa3456f49cef2c1338
blob + a6a5f1d67d82a01991b2376302b34fff40058513
--- dulwich/contrib/test_swift.py
+++ dulwich/contrib/test_swift.py
try:
import gevent # noqa:F401
-except ImportError:
+except ModuleNotFoundError:
missing_libs.append("gevent")
try:
import geventhttpclient # noqa:F401
-except ImportError:
+except ModuleNotFoundError:
missing_libs.append("geventhttpclient")
try:
from unittest.mock import patch
-except ImportError:
+except ModuleNotFoundError:
missing_libs.append("mock")
skipmsg = "Required libraries are not installed (%r)" % missing_libs
blob - 5918fa5c48bdc68b61a7969eef2a079f6308c54b
blob + 561b6f96d282da770f2ba9a6a95a372b951bfef6
--- dulwich/object_store.py
+++ dulwich/object_store.py
import stat
import sys
+from typing import Callable, Dict, List, Optional, Tuple
+
from dulwich.diff_tree import (
tree_changes,
walk_trees,
class BaseObjectStore(object):
"""Object store interface."""
- def determine_wants_all(self, refs, depth=None):
+ def determine_wants_all(
+ self,
+ refs: Dict[bytes, bytes],
+ depth: Optional[int] = None
+ ) -> List[bytes]:
def _want_deepen(sha):
if not depth:
return False
def __iter__(self):
"""Iterate over the SHAs that are present in this store."""
raise NotImplementedError(self.__iter__)
+
+ def add_pack(
+ self
+ ) -> Tuple[BytesIO, Callable[[], None], Callable[[], None]]:
+ """Add a new pack to this object store."""
+ raise NotImplementedError(self.add_pack)
def add_object(self, obj):
"""Add a single object to this object store."""
blob - 5c1eb3a6880bb91cdb18a512cfde52b02b1e1990
blob + 90f7de22615ee11b67adfd203f46044e4367e7a7
--- dulwich/objects.py
+++ dulwich/objects.py
if not isinstance(other, ShaFile):
raise TypeError
return self.id <= other.id
-
- def __cmp__(self, other):
- """Compare the SHA of this object with that of the other object."""
- if not isinstance(other, ShaFile):
- raise TypeError
- return cmp(self.id, other.id) # noqa: F821
class Blob(ShaFile):
blob - e7d969eb9b52e7983d2f3a780038242848a3083b
blob + 2b230f620bd00fd8e0000912eb3c170a7afad6f6
--- dulwich/pack.py
+++ dulwich/pack.py
pack_contents,
compression_level=compression_level,
)
+
+
+class PackChunkGenerator(object):
+
+ def __init__(self, num_records=None, records=None, progress=None, compression_level=-1):
+ self.cs = sha1(b"")
+ self.entries = {}
+ self._it = self._pack_data_chunks(
+ num_records=num_records, records=records, progress=progress, compression_level=compression_level)
+
+ def sha1digest(self):
+ return self.cs.digest()
+
+ def __iter__(self):
+ return self._it
+
+ def _pack_data_chunks(self, num_records=None, records=None, progress=None, compression_level=-1):
+ """Iterate pack data file chunks..
+
+ Args:
+ num_records: Number of records (defaults to len(records) if None)
+ records: Iterator over type_num, object_id, delta_base, raw
+ progress: Function to report progress to
+ compression_level: the zlib compression level
+ Yields: chunks of the pack data file, as bytes
+ """
+ # Write the pack
+ if num_records is None:
+ num_records = len(records)
+ f = BytesIO()
+ write_pack_header(f, num_records)
+ self.cs.update(f.getvalue())
+ yield f.getvalue()
+ offset = f.tell()
+ actual_num_records = 0
+ for i, (type_num, object_id, delta_base, raw) in enumerate(records):
+ if progress is not None:
+ progress(("writing pack data: %d/%d\r" % (i, num_records)).encode("ascii"))
+ if delta_base is not None:
+ try:
+ base_offset, base_crc32 = self.entries[delta_base]
+ except KeyError:
+ type_num = REF_DELTA
+ raw = (delta_base, raw)
+ else:
+ type_num = OFS_DELTA
+ raw = (offset - base_offset, raw)
+ f = BytesIO()
+ crc32 = write_pack_object(f, type_num, raw, compression_level=compression_level)
+ self.cs.update(f.getvalue())
+ yield f.getvalue()
+ actual_num_records += 1
+ self.entries[object_id] = (offset, crc32)
+ offset += f.tell()
+ if actual_num_records != num_records:
+ raise AssertionError(
+ 'actual records written differs: %d != %d' % (
+ actual_num_records, num_records))
+ yield self.cs.digest()
+
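send_pack can now stream a pack instead of materializing it first. A minimal sketch of driving the generator directly, assuming records yields (type_num, object_id, delta_base, raw) tuples:

    def stream_pack(records, num_records, write):
        gen = PackChunkGenerator(num_records=num_records, records=records)
        for chunk in gen:   # header, one chunk per object, then the SHA-1 trailer
            write(chunk)
        return gen.entries  # id -> (offset, crc32), matching write_pack_data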
def write_pack_data(f, num_records=None, records=None, progress=None, compression_level=-1):
"""Write a new pack data file.
compression_level: the zlib compression level
Returns: Dict mapping id -> (offset, crc32 checksum), pack checksum
"""
- # Write the pack
- entries = {}
- f = SHA1Writer(f)
- if num_records is None:
- num_records = len(records)
- write_pack_header(f, num_records)
- actual_num_records = 0
- for i, (type_num, object_id, delta_base, raw) in enumerate(records):
- if progress is not None:
- progress(("writing pack data: %d/%d\r" % (i, num_records)).encode("ascii"))
- offset = f.offset()
- if delta_base is not None:
- try:
- base_offset, base_crc32 = entries[delta_base]
- except KeyError:
- type_num = REF_DELTA
- raw = (delta_base, raw)
- else:
- type_num = OFS_DELTA
- raw = (offset - base_offset, raw)
- crc32 = write_pack_object(f, type_num, raw, compression_level=compression_level)
- actual_num_records += 1
- entries[object_id] = (offset, crc32)
- if actual_num_records != num_records:
- raise AssertionError(
- 'actual records written differs: %d != %d' % (
- actual_num_records, num_records))
- return entries, f.write_sha()
+ chunk_generator = PackChunkGenerator(
+ num_records=num_records, records=records, progress=progress,
+ compression_level=compression_level)
+ for chunk in chunk_generator:
+ f.write(chunk)
+ return chunk_generator.entries, chunk_generator.sha1digest()
def write_pack_index_v1(f, entries, pack_checksum):
blob - 699864caa0e476d41beda09a4b630442422bd036
blob + 1c1d8c4cb574c330838b5e118156315ec1e2ec44
--- dulwich/porcelain.py
+++ dulwich/porcelain.py
mkdir = not os.path.exists(target)
- (client, path) = get_transport_and_path(source)
+ (client, path) = get_transport_and_path(source, **kwargs)
return client.clone(
path,
if config.has_section(section):
remote_name = encoded_location.decode()
url = config.get(section, "url")
+ assert url is not None
encoded_location = url
else:
remote_name = None
_import_remote_refs(r.refs, remote_name, fetch_result.refs)
-def status(repo=".", ignored=False):
+def status(repo=".", ignored=False, untracked_files="all"):
"""Returns staged, unstaged, and untracked changes relative to the HEAD.
Args:
repo: Path to repository or repository object
ignored: Whether to include ignored files in untracked
+ untracked_files: How to handle untracked files, defaults to "all":
+ "no": do not return untracked files
+ "all": include all files in untracked directories
+ Using `untracked_files="no"` can be faster than "all" when the worktreee
+ contains many untracked files/directories.
+
+ Note: `untracked_files="normal" (`git`'s default) is not implemented.
+
Returns: GitStatus tuple,
staged - dict with lists of staged paths (diff index/HEAD)
unstaged - list of unstaged paths (diff index/working-tree)
unstaged_changes = list(get_unstaged_changes(index, r.path, filter_callback))
untracked_paths = get_untracked_paths(
- r.path, r.path, index, exclude_ignored=not ignored
+ r.path,
+ r.path,
+ index,
+ exclude_ignored=not ignored,
+ untracked_files=untracked_files,
)
untracked_changes = list(untracked_paths)
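A usage sketch of the new argument (repository path hypothetical):

    from dulwich import porcelain

    staged, unstaged, untracked = porcelain.status(
        "path/to/repo", untracked_files="no")
    assert untracked == []  # the untracked walk is skipped entirely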
dirnames[:] = prune_dirnames(dirpath, dirnames)
-def get_untracked_paths(frompath, basepath, index, exclude_ignored=False):
+def get_untracked_paths(
+ frompath, basepath, index, exclude_ignored=False, untracked_files="all"
+):
"""Get untracked paths.
Args:
basepath: Path to compare to
index: Index to check against
exclude_ignored: Whether to exclude ignored paths
+ untracked_files: How to handle untracked files:
+ - "no": return an empty list
+ - "all": return all files in untracked directories
+ - "normal": Not implemented
Note: ignored directories will never be walked for performance reasons.
If exclude_ignored is False, only the path to an ignored directory will
be yielded, no files inside the directory will be returned
"""
+ if untracked_files == "normal":
+ raise NotImplementedError("normal is not yet supported")
+
+ if untracked_files not in ("no", "all"):
+ raise ValueError("untracked_files must be one of (no, all)")
+
+ if untracked_files == "no":
+ return
+
with open_repo_closing(basepath) as r:
ignore_manager = IgnoreFilterManager.from_repo(r)
if not is_dir:
ip = path_to_tree_path(basepath, ap)
if ip not in index:
- if (
- not exclude_ignored
- or not ignore_manager.is_ignored(
- os.path.relpath(ap, basepath)
- )
+ if not exclude_ignored or not ignore_manager.is_ignored(
+ os.path.relpath(ap, basepath)
):
yield os.path.relpath(ap, frompath)
list_tree(r.object_store, tree.id, "")
-def remote_add(repo, name, url):
+def remote_add(repo: Repo, name: Union[bytes, str], url: Union[bytes, str]):
"""Add a remote.
Args:
if c.has_section(section):
raise RemoteExists(section)
c.set(section, b"url", url)
+ c.write_to_path()
+
+
+def remote_remove(repo: Repo, name: Union[bytes, str]):
+ """Remove a remote
+
+ Args:
+ repo: Path to the repository
+ name: Remote name
+ """
+ if not isinstance(name, bytes):
+ name = name.encode(DEFAULT_ENCODING)
+ with open_repo_closing(repo) as r:
+ c = r.get_config()
+ section = (b"remote", name)
+ del c[section]
c.write_to_path()
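A round-trip sketch of the new call (paths hypothetical); removing a remote that does not exist raises KeyError, as the tests below exercise:

    from dulwich import porcelain
    from dulwich.repo import Repo

    repo = Repo.init("/tmp/example-repo", mkdir=True)
    porcelain.remote_add(repo, "upstream", "git://example.com/repo.git")
    porcelain.remote_remove(repo, "upstream")  # second call raises KeyError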
blob - 317475c5be36785854bcea0689a3ce26b40d8523
blob + 1a1fc547e1d0a4f8c074ca2bbc52ff21d66589a7
--- dulwich/refs.py
+++ dulwich/refs.py
def import_refs(
self,
- base,
- other,
- committer=None,
- timestamp=None,
- timezone=None,
- message=None,
- prune=False,
+ base: bytes,
+ other: Dict[bytes, bytes],
+ committer: Optional[bytes] = None,
+ timestamp: Optional[bytes] = None,
+ timezone: Optional[bytes] = None,
+ message: Optional[bytes] = None,
+ prune: bool = False,
):
if prune:
to_delete = set(self.subkeys(base))
ret[src] = dst
return ret
- def watch(self):
- """Watch for changes to the refs in this container.
- Returns a context manager that yields tuples with (refname, new_sha)
- """
- raise NotImplementedError(self.watch)
-
-
-class _DictRefsWatcher(object):
- def __init__(self, refs):
- self._refs = refs
-
- def __enter__(self):
- from queue import Queue
-
- self.queue = Queue()
- self._refs._watchers.add(self)
- return self
-
- def __next__(self):
- return self.queue.get()
-
- def _notify(self, entry):
- self.queue.put_nowait(entry)
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- self._refs._watchers.remove(self)
- return False
-
-
class DictRefsContainer(RefsContainer):
"""RefsContainer backed by a simple dict.
for watcher in self._watchers:
watcher._notify((ref, newsha))
- def watch(self):
- return _DictRefsWatcher(self)
-
def set_symbolic_ref(
self,
name,
return self._refs[name]
-class _InotifyRefsWatcher(object):
- def __init__(self, path):
- import pyinotify
- from queue import Queue
-
- self.path = os.fsdecode(path)
- self.manager = pyinotify.WatchManager()
- self.manager.add_watch(
- self.path,
- pyinotify.IN_DELETE | pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO,
- rec=True,
- auto_add=True,
- )
-
- self.notifier = pyinotify.ThreadedNotifier(
- self.manager, default_proc_fun=self._notify
- )
- self.queue = Queue()
-
- def _notify(self, event):
- if event.dir:
- return
- if event.pathname.endswith(".lock"):
- return
- ref = os.fsencode(os.path.relpath(event.pathname, self.path))
- if event.maskname == "IN_DELETE":
- self.queue.put_nowait((ref, None))
- elif event.maskname in ("IN_CLOSE_WRITE", "IN_MOVED_TO"):
- with open(event.pathname, "rb") as f:
- sha = f.readline().rstrip(b"\n\r")
- self.queue.put_nowait((ref, sha))
-
- def __next__(self):
- return self.queue.get()
-
- def __enter__(self):
- self.notifier.start()
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- self.notifier.stop()
- return False
-
-
class DiskRefsContainer(RefsContainer):
"""Refs container that reads refs from disk."""
return True
- def watch(self):
- import pyinotify # noqa: F401
- return _InotifyRefsWatcher(self.path)
-
-
def _split_ref_line(line):
"""Split a single ref line into a tuple of SHA1 and name."""
fields = line.rstrip(b"\n\r").split(b" ")
blob - 7c3c3a782b9b206c29f6311985c4f63e279b9ca9
blob + 80c2ad575723a1452ebcc6bd4b34a2fe267fc7e1
--- dulwich/repo.py
+++ dulwich/repo.py
bare (bool): Whether this is a bare repository
"""
- def __init__(self, root, object_store=None, bare=None):
+ def __init__(
+ self,
+ root: str,
+ object_store: Optional[BaseObjectStore] = None,
+ bare: Optional[bool] = None
+ ) -> None:
hidden_path = os.path.join(root, CONTROLDIR)
if bare is None:
if (os.path.isfile(hidden_path) or
self.path = root
config = self.get_config()
try:
- format_version = int(config.get("core", "repositoryformatversion"))
+ repository_format_version = config.get(
+ "core",
+ "repositoryformatversion"
+ )
+ format_version = (
+ 0
+ if repository_format_version is None
+ else int(repository_format_version)
+ )
except KeyError:
format_version = 0
+
if format_version != 0:
raise UnsupportedVersion(format_version)
if object_store is None:
raise
return target
- def reset_index(self, tree=None):
+ def reset_index(self, tree: Optional[Tree] = None):
"""Reset the index back to a specific tree.
Args:
return ret
@classmethod
- def init(cls, path, mkdir=False):
+ def init(cls, path: str, mkdir: bool = False) -> "Repo":
"""Create a new repository.
Args:
blob - 97d835311f22e66de92b4f572277fdfab58fdacc
blob + 2f9cad482119221abd0c24c55b4ca10bc7b7726b
--- dulwich/tests/compat/test_client.py
+++ dulwich/tests/compat/test_client.py
try:
nbytes = int(length)
except (TypeError, ValueError):
- nbytes = 0
- if self.command.lower() == "post" and nbytes > 0:
- data = self.rfile.read(nbytes)
+ nbytes = -1
+ if self.command.lower() == "post":
+ if nbytes > 0:
+ data = self.rfile.read(nbytes)
+ elif self.headers.get('transfer-encoding') == 'chunked':
+ chunks = []
+ while True:
+ line = self.rfile.readline()
+ length = int(line.rstrip(), 16)
+ chunk = self.rfile.read(length + 2)
+ chunks.append(chunk[:-2])
+ if length == 0:
+ break
+ data = b''.join(chunks)
+ env["CONTENT_LENGTH"] = str(len(data))
+ else:
+ raise AssertionError
else:
data = None
env["CONTENT_LENGTH"] = "0"
blob - 55acf585a0255c3b05b8be4231d4125db3889cdb
blob + 12f62d15e3ef2f408dc60a906e06e474720ff45a
--- dulwich/tests/test_client.py
+++ dulwich/tests/test_client.py
quote as urlquote,
urlparse,
)
+
+from unittest.mock import patch
import dulwich
from dulwich import (
self.assertIsInstance(c, HttpGitClient)
self.assertEqual("/jelmer/dulwich", path)
+ @patch("os.name", "posix")
+ @patch("sys.platform", "linux")
def test_file(self):
c, path = get_transport_and_path_from_url("file:///home/jelmer/foo")
self.assertIsInstance(c, LocalGitClient)
self.assertEqual("/home/jelmer/foo", path)
+ @patch("os.name", "nt")
+ @patch("sys.platform", "win32")
+ def test_file_win(self):
+ # `_win32_url_to_path` uses urllib.request.url2pathname, which is
+ # `nturl2path.url2pathname` when `os.name == "nt"`
+ from nturl2path import url2pathname
+ with patch("dulwich.client.url2pathname", url2pathname):
+ expected = "C:\\foo.bar\\baz"
+ for file_url in [
+ "file:C:/foo.bar/baz",
+ "file:/C:/foo.bar/baz",
+ "file://C:/foo.bar/baz",
+ "file://C://foo.bar//baz",
+ "file:///C:/foo.bar/baz",
+ ]:
+ c, path = get_transport_and_path(file_url)
+ self.assertIsInstance(c, LocalGitClient)
+ self.assertEqual(path, expected)
+
+ for remote_url in [
+ "file://host.example.com/C:/foo.bar/baz"
+ "file://host.example.com/C:/foo.bar/baz"
+ "file:////host.example/foo.bar/baz",
+ ]:
+ with self.assertRaises(NotImplementedError):
+ c, path = get_transport_and_path(remote_url)
+
+
class TestSSHVendor(object):
def __init__(self):
self.host = None
def __init__(self):
self.headers = {}
- def request(self, method, url, fields=None, headers=None, redirect=True):
+ def request(self, method, url, fields=None, headers=None, redirect=True, preload_content=True):
base_url = url[: -len(tail)]
redirect_base_url = test_data[base_url]["redirect_url"]
redirect_url = redirect_base_url + tail
if redirect is False:
request_url = url
if redirect_base_url != base_url:
- body = ""
+ body = b""
headers["location"] = redirect_url
status = 301
return HTTPResponse(
- body=body,
+ body=BytesIO(body),
headers=headers,
request_method=method,
request_url=request_url,
+ preload_content=preload_content,
status=status,
)
blob - a1d37c0f96ce73b10dd47fda9e180499afedb4ee
blob + 201669ebff9d296ddd2ea89404ca02dfa5debb6e
--- dulwich/tests/test_porcelain.py
+++ dulwich/tests/test_porcelain.py
self.repo_path = os.path.join(self.test_dir, "repo")
self.repo = Repo.init(self.repo_path, mkdir=True)
self.addCleanup(self.repo.close)
+
+ def assertRecentTimestamp(self, ts):
+ # On some slow CIs it does actually take more than 5 seconds to go from
+ # creating the tag to here.
+ self.assertLess(time.time() - ts, 50)
class PorcelainGpgTestCase(PorcelainTestCase):
@skipIf(platform.python_implementation() == "PyPy" or sys.platform == "win32", "gpgme not easily available or supported on Windows and PyPy")
class TagCreateSignTests(PorcelainGpgTestCase):
+
def test_default_key(self):
import gpg
self.assertIsInstance(tag, Tag)
self.assertEqual(b"foo <foo@bar.com>", tag.tagger)
self.assertEqual(b"bar\n", tag.message)
- self.assertLess(time.time() - tag.tag_time, 5)
+ self.assertRecentTimestamp(tag.tag_time)
tag = self.repo[b'refs/tags/tryme']
# GPG Signatures aren't deterministic, so we can't do a static assertion.
tag.verify()
self.assertIsInstance(tag, Tag)
self.assertEqual(b"foo <foo@bar.com>", tag.tagger)
self.assertEqual(b"bar\n", tag.message)
- self.assertLess(time.time() - tag.tag_time, 5)
+ self.assertRecentTimestamp(tag.tag_time)
tag = self.repo[b'refs/tags/tryme']
# GPG Signatures aren't deterministic, so we can't do a static assertion.
tag.verify()
class TagCreateTests(PorcelainTestCase):
+
def test_annotated(self):
c1, c2, c3 = build_commit_graph(
self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
self.assertIsInstance(tag, Tag)
self.assertEqual(b"foo <foo@bar.com>", tag.tagger)
self.assertEqual(b"bar\n", tag.message)
- self.assertLess(time.time() - tag.tag_time, 5)
+ self.assertRecentTimestamp(tag.tag_time)
def test_unannotated(self):
c1, c2, c3 = build_commit_graph(
results.staged,
)
self.assertListEqual(results.unstaged, [b"blye"])
- self.assertListEqual(results.untracked, ["blyat"])
+ results_no_untracked = porcelain.status(self.repo.path, untracked_files="no")
+ self.assertListEqual(results_no_untracked.untracked, [])
+
+ def test_status_wrong_untracked_files_value(self):
+ with self.assertRaises(ValueError):
+ porcelain.status(self.repo.path, untracked_files="antani")
def test_status_crlf_mismatch(self):
# First make a commit as if the file has been added on a Linux system
)
)
)
+
+ def test_get_untracked_paths_invalid_untracked_files(self):
+ with self.assertRaises(ValueError):
+ list(
+ porcelain.get_untracked_paths(
+ self.repo.path,
+ self.repo.path,
+ self.repo.open_index(),
+ untracked_files="invalid_value",
+ )
+ )
+ def test_get_untracked_paths_normal(self):
+ with self.assertRaises(NotImplementedError):
+ _, _, _ = porcelain.status(
+ repo=self.repo.path, untracked_files="normal"
+ )
# TODO(jelmer): Add test for dulwich.porcelain.daemon
self.repo,
"jelmer",
"git://jelmer.uk/code/dulwich",
+ )
+
+
+class RemoteRemoveTests(PorcelainTestCase):
+ def test_remove(self):
+ porcelain.remote_add(self.repo, "jelmer", "git://jelmer.uk/code/dulwich")
+ c = self.repo.get_config()
+ self.assertEqual(
+ c.get((b"remote", b"jelmer"), b"url"),
+ b"git://jelmer.uk/code/dulwich",
)
+ porcelain.remote_remove(self.repo, "jelmer")
+ self.assertRaises(KeyError, porcelain.remote_remove, self.repo, "jelmer")
+ c = self.repo.get_config()
+ self.assertRaises(KeyError, c.get, (b"remote", b"jelmer"), b"url")
class CheckIgnoreTests(PorcelainTestCase):
blob - 6ad74c6c247e4be21331a85367dfc8747343fea2
blob + 47e38a2eb776e39b8b38ad685df9bd3adcc4cc3c
--- dulwich/tests/test_refs.py
+++ dulwich/tests/test_refs.py
self._refs[b"refs/remotes/origin/master"],
)
self.assertNotIn(b"refs/remotes/origin/other", self._refs)
-
- def test_watch(self):
- try:
- watcher = self._refs.watch()
- except (NotImplementedError, ImportError):
- self.skipTest("watching not supported")
- with watcher:
- self._refs[
- b"refs/remotes/origin/other"
- ] = b"48d01bd4b77fed026b154d16493e5deab78f02ec"
- change = next(watcher)
- self.assertEqual(
- (
- b"refs/remotes/origin/other",
- b"48d01bd4b77fed026b154d16493e5deab78f02ec",
- ),
- change,
- )
- self._refs[
- b"refs/remotes/origin/other"
- ] = b"48d01bd4b77fed026b154d16493e5deab78f02ed"
- change = next(watcher)
- self.assertEqual(
- (
- b"refs/remotes/origin/other",
- b"48d01bd4b77fed026b154d16493e5deab78f02ed",
- ),
- change,
- )
- del self._refs[b"refs/remotes/origin/other"]
- change = next(watcher)
- self.assertEqual((b"refs/remotes/origin/other", None), change)
class DictRefsContainerTests(RefsContainerTests, TestCase):
blob - 898d25899cc09937c040d2f8c9a1e9fa914bee87
blob + 05c4d30ee39bfb34a2b0d627d8c71c5e1575b545
--- dulwich.egg-info/PKG-INFO
+++ dulwich.egg-info/PKG-INFO
Metadata-Version: 2.1
Name: dulwich
-Version: 0.20.35
+Version: 0.20.42
Summary: Python Git Library
Home-page: https://www.dulwich.io/
Author: Jelmer Vernooij
Requires-Python: >=3.6
Provides-Extra: fastimport
Provides-Extra: https
+Provides-Extra: paramiko
Provides-Extra: pgp
-Provides-Extra: watch
License-File: COPYING
License-File: AUTHORS
----
There is a *#dulwich* IRC channel on the `OFTC <https://www.oftc.net/>`_, and
-`dulwich-announce <https://groups.google.com/forum/#!forum/dulwich-announce>`_
-and `dulwich-discuss <https://groups.google.com/forum/#!forum/dulwich-discuss>`_
-mailing lists.
+a `dulwich-discuss <https://groups.google.com/forum/#!forum/dulwich-discuss>`_
+mailing list.
Contributing
------------
blob - df6f41c058c8948945b84cd38845c17eb4f85c28
blob + 0ced704f94567aab055500a6215305a5a139b053
--- dulwich.egg-info/SOURCES.txt
+++ dulwich.egg-info/SOURCES.txt
tox.ini
.github/FUNDING.yml
.github/workflows/pythonpackage.yml
-.github/workflows/pythonpublish.yml
+.github/workflows/pythonwheels.yml
bin/dul-receive-pack
bin/dul-upload-pack
bin/dulwich
blob - 0c74d2db401a949bcb8fb3ed6ebdded17ebbfdbd
blob + 480a2c67cf189b71abcf69eaa147f9f4f0259abf
--- dulwich.egg-info/requires.txt
+++ dulwich.egg-info/requires.txt
[https]
urllib3[secure]>=1.24.1
+[paramiko]
+paramiko
+
[pgp]
gpg
-
-[watch]
-pyinotify
blob - c1eed47566a5bedae1bc21014b88862d3edb217f
blob + 161936c0cc6aefb7cb25f7a604fd063c0a0b952c
--- setup.py
+++ setup.py
'For 2.7 support, please install a version prior to 0.20')
-dulwich_version_string = '0.20.35'
+dulwich_version_string = '0.20.42'
class DulwichDistribution(Distribution):
'fastimport': ['fastimport'],
'https': ['urllib3[secure]>=1.24.1'],
'pgp': ['gpg'],
- 'watch': ['pyinotify'],
+ 'paramiko': ['paramiko'],
}
setup_kwargs['install_requires'] = ['urllib3>=1.24.1', 'certifi']
setup_kwargs['include_package_data'] = True