Merge lp:~jelmer/brz/misc-foreign3 into lp:~jelmer/brz/foreign

Proposed by Jelmer Vernooij
Status: Merged
Merged at revision: 6849
Proposed branch: lp:~jelmer/brz/misc-foreign3
Merge into: lp:~jelmer/brz/foreign
Diff against target: 1419 lines (+418/-372) (has conflicts)
19 files modified
breezy/bzr/bzrdir.py (+25/-112)
breezy/bzr/remote.py (+2/-2)
breezy/bzr/workingtree.py (+124/-0)
breezy/controldir.py (+96/-0)
breezy/log.py (+12/-12)
breezy/tests/per_branch/test_push.py (+13/-16)
breezy/tests/per_branch/test_tags.py (+13/-11)
breezy/tests/per_controldir/test_controldir.py (+2/-0)
breezy/tests/per_intertree/test_compare.py (+14/-13)
breezy/tests/per_repository/test_fetch.py (+10/-11)
breezy/tests/per_repository/test_repository.py (+26/-5)
breezy/tests/per_tree/test_annotate_iter.py (+2/-0)
breezy/tests/per_workingtree/test_add.py (+2/-0)
breezy/tests/per_workingtree/test_annotate_iter.py (+57/-57)
breezy/tests/per_workingtree/test_get_file_mtime.py (+1/-1)
breezy/tests/per_workingtree/test_set_root_id.py (+4/-0)
breezy/tests/per_workingtree/test_walkdirs.py (+4/-2)
breezy/tests/per_workingtree/test_workingtree.py (+6/-5)
breezy/workingtree.py (+5/-125)
Text conflict in breezy/bzr/bzrdir.py
Text conflict in breezy/tests/per_repository/test_repository.py
To merge this branch: bzr merge lp:~jelmer/brz/misc-foreign3
Reviewer Review Type Date Requested Status
Martin Packman (community) Approve
Jelmer Vernooij Pending
Review via email: mp+334443@code.launchpad.net

Description of the change

Avoid specifying revision_id/file_id in a few more cases.

To post a comment you must log in.
Revision history for this message
Martin Packman (gz) wrote :

There is a stack of commits (and some conflicts) in the branch here.

Looking only at the last rev, the changes seem good.

review: Approve

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
=== modified file 'breezy/bzr/bzrdir.py'
--- breezy/bzr/bzrdir.py 2017-11-20 22:56:39 +0000
+++ breezy/bzr/bzrdir.py 2017-11-29 12:08:31 +0000
@@ -305,9 +305,9 @@
305 return policy305 return policy
306 else:306 else:
307 try:307 try:
308 return UseExistingRepository(self.open_repository(),308 return UseExistingRepository(
309 stack_on, stack_on_pwd,309 self.open_repository(), stack_on, stack_on_pwd,
310 require_stacking=require_stacking)310 require_stacking=require_stacking)
311 except errors.NoRepositoryPresent:311 except errors.NoRepositoryPresent:
312 pass312 pass
313 return CreateRepository(self, stack_on, stack_on_pwd,313 return CreateRepository(self, stack_on, stack_on_pwd,
@@ -1737,14 +1737,22 @@
1737 except errors.NoRepositoryPresent:1737 except errors.NoRepositoryPresent:
1738 pass1738 pass
1739 else:1739 else:
1740 if not isinstance(repo._format, self.target_format.repository_format.__class__):1740 repo_fmt = self.target_format.repository_format
1741 if not isinstance(repo._format, repo_fmt.__class__):
1741 from ..repository import CopyConverter1742 from ..repository import CopyConverter
1742 ui.ui_factory.note(gettext('starting repository conversion'))1743 ui.ui_factory.note(gettext('starting repository conversion'))
1744<<<<<<< TREE
1743 if not (self.target_format.1745 if not (self.target_format.
1744 repository_format.supports_overriding_transport):1746 repository_format.supports_overriding_transport):
1745 raise AssertionError(1747 raise AssertionError(
1746 "Repository in metadir does not support "1748 "Repository in metadir does not support "
1747 "overriding transport")1749 "overriding transport")
1750=======
1751 if not repo_fmt.supports_overriding_transport:
1752 raise AssertionError(
1753 "Repository in metadir does not support "
1754 "overriding transport")
1755>>>>>>> MERGE-SOURCE
1748 converter = CopyConverter(self.target_format.repository_format)1756 converter = CopyConverter(self.target_format.repository_format)
1749 converter.convert(repo, pb)1757 converter.convert(repo, pb)
1750 for branch in self.controldir.list_branches():1758 for branch in self.controldir.list_branches():
@@ -1833,138 +1841,43 @@
1833 return BzrDir.open_from_transport(to_convert.root_transport)1841 return BzrDir.open_from_transport(to_convert.root_transport)
18341842
18351843
1836class RepositoryAcquisitionPolicy(object):1844class CreateRepository(controldir.RepositoryAcquisitionPolicy):
1837 """Abstract base class for repository acquisition policies.
1838
1839 A repository acquisition policy decides how a BzrDir acquires a repository
1840 for a branch that is being created. The most basic policy decision is
1841 whether to create a new repository or use an existing one.
1842 """
1843 def __init__(self, stack_on, stack_on_pwd, require_stacking):
1844 """Constructor.
1845
1846 :param stack_on: A location to stack on
1847 :param stack_on_pwd: If stack_on is relative, the location it is
1848 relative to.
1849 :param require_stacking: If True, it is a failure to not stack.
1850 """
1851 self._stack_on = stack_on
1852 self._stack_on_pwd = stack_on_pwd
1853 self._require_stacking = require_stacking
1854
1855 def configure_branch(self, branch):
1856 """Apply any configuration data from this policy to the branch.
1857
1858 Default implementation sets repository stacking.
1859 """
1860 if self._stack_on is None:
1861 return
1862 if self._stack_on_pwd is None:
1863 stack_on = self._stack_on
1864 else:
1865 try:
1866 stack_on = urlutils.rebase_url(self._stack_on,
1867 self._stack_on_pwd,
1868 branch.user_url)
1869 except urlutils.InvalidRebaseURLs:
1870 stack_on = self._get_full_stack_on()
1871 try:
1872 branch.set_stacked_on_url(stack_on)
1873 except (_mod_branch.UnstackableBranchFormat,
1874 errors.UnstackableRepositoryFormat):
1875 if self._require_stacking:
1876 raise
1877
1878 def requires_stacking(self):
1879 """Return True if this policy requires stacking."""
1880 return self._stack_on is not None and self._require_stacking
1881
1882 def _get_full_stack_on(self):
1883 """Get a fully-qualified URL for the stack_on location."""
1884 if self._stack_on is None:
1885 return None
1886 if self._stack_on_pwd is None:
1887 return self._stack_on
1888 else:
1889 return urlutils.join(self._stack_on_pwd, self._stack_on)
1890
1891 def _add_fallback(self, repository, possible_transports=None):
1892 """Add a fallback to the supplied repository, if stacking is set."""
1893 stack_on = self._get_full_stack_on()
1894 if stack_on is None:
1895 return
1896 try:
1897 stacked_dir = BzrDir.open(stack_on,
1898 possible_transports=possible_transports)
1899 except errors.JailBreak:
1900 # We keep the stacking details, but we are in the server code so
1901 # actually stacking is not needed.
1902 return
1903 try:
1904 stacked_repo = stacked_dir.open_branch().repository
1905 except errors.NotBranchError:
1906 stacked_repo = stacked_dir.open_repository()
1907 try:
1908 repository.add_fallback_repository(stacked_repo)
1909 except errors.UnstackableRepositoryFormat:
1910 if self._require_stacking:
1911 raise
1912 else:
1913 self._require_stacking = True
1914
1915 def acquire_repository(self, make_working_trees=None, shared=False,
1916 possible_transports=None):
1917 """Acquire a repository for this bzrdir.
1918
1919 Implementations may create a new repository or use a pre-exising
1920 repository.
1921
1922 :param make_working_trees: If creating a repository, set
1923 make_working_trees to this value (if non-None)
1924 :param shared: If creating a repository, make it shared if True
1925 :return: A repository, is_new_flag (True if the repository was
1926 created).
1927 """
1928 raise NotImplementedError(RepositoryAcquisitionPolicy.acquire_repository)
1929
1930
1931class CreateRepository(RepositoryAcquisitionPolicy):
1932 """A policy of creating a new repository"""1845 """A policy of creating a new repository"""
19331846
1934 def __init__(self, bzrdir, stack_on=None, stack_on_pwd=None,1847 def __init__(self, controldir, stack_on=None, stack_on_pwd=None,
1935 require_stacking=False):1848 require_stacking=False):
1936 """Constructor.1849 """Constructor.
19371850
1938 :param bzrdir: The bzrdir to create the repository on.1851 :param controldir: The controldir to create the repository on.
1939 :param stack_on: A location to stack on1852 :param stack_on: A location to stack on
1940 :param stack_on_pwd: If stack_on is relative, the location it is1853 :param stack_on_pwd: If stack_on is relative, the location it is
1941 relative to.1854 relative to.
1942 """1855 """
1943 RepositoryAcquisitionPolicy.__init__(self, stack_on, stack_on_pwd,1856 super(CreateRepository, self).__init__(
1944 require_stacking)1857 stack_on, stack_on_pwd, require_stacking)
1945 self._bzrdir = bzrdir1858 self._controldir = controldir
19461859
1947 def acquire_repository(self, make_working_trees=None, shared=False,1860 def acquire_repository(self, make_working_trees=None, shared=False,
1948 possible_transports=None):1861 possible_transports=None):
1949 """Implementation of RepositoryAcquisitionPolicy.acquire_repository1862 """Implementation of RepositoryAcquisitionPolicy.acquire_repository
19501863
1951 Creates the desired repository in the bzrdir we already have.1864 Creates the desired repository in the controldir we already have.
1952 """1865 """
1953 if possible_transports is None:1866 if possible_transports is None:
1954 possible_transports = []1867 possible_transports = []
1955 else:1868 else:
1956 possible_transports = list(possible_transports)1869 possible_transports = list(possible_transports)
1957 possible_transports.append(self._bzrdir.root_transport)1870 possible_transports.append(self._controldir.root_transport)
1958 stack_on = self._get_full_stack_on()1871 stack_on = self._get_full_stack_on()
1959 if stack_on:1872 if stack_on:
1960 format = self._bzrdir._format1873 format = self._controldir._format
1961 format.require_stacking(stack_on=stack_on,1874 format.require_stacking(stack_on=stack_on,
1962 possible_transports=possible_transports)1875 possible_transports=possible_transports)
1963 if not self._require_stacking:1876 if not self._require_stacking:
1964 # We have picked up automatic stacking somewhere.1877 # We have picked up automatic stacking somewhere.
1965 note(gettext('Using default stacking branch {0} at {1}').format(1878 note(gettext('Using default stacking branch {0} at {1}').format(
1966 self._stack_on, self._stack_on_pwd))1879 self._stack_on, self._stack_on_pwd))
1967 repository = self._bzrdir.create_repository(shared=shared)1880 repository = self._controldir.create_repository(shared=shared)
1968 self._add_fallback(repository,1881 self._add_fallback(repository,
1969 possible_transports=possible_transports)1882 possible_transports=possible_transports)
1970 if make_working_trees is not None:1883 if make_working_trees is not None:
@@ -1972,7 +1885,7 @@
1972 return repository, True1885 return repository, True
19731886
19741887
1975class UseExistingRepository(RepositoryAcquisitionPolicy):1888class UseExistingRepository(controldir.RepositoryAcquisitionPolicy):
1976 """A policy of reusing an existing repository"""1889 """A policy of reusing an existing repository"""
19771890
1978 def __init__(self, repository, stack_on=None, stack_on_pwd=None,1891 def __init__(self, repository, stack_on=None, stack_on_pwd=None,
@@ -1984,8 +1897,8 @@
1984 :param stack_on_pwd: If stack_on is relative, the location it is1897 :param stack_on_pwd: If stack_on is relative, the location it is
1985 relative to.1898 relative to.
1986 """1899 """
1987 RepositoryAcquisitionPolicy.__init__(self, stack_on, stack_on_pwd,1900 super(UseExistingRepository, self).__init__(
1988 require_stacking)1901 stack_on, stack_on_pwd, require_stacking)
1989 self._repository = repository1902 self._repository = repository
19901903
1991 def acquire_repository(self, make_working_trees=None, shared=False,1904 def acquire_repository(self, make_working_trees=None, shared=False,
19921905
=== modified file 'breezy/bzr/remote.py'
--- breezy/bzr/remote.py 2017-11-20 22:51:10 +0000
+++ breezy/bzr/remote.py 2017-11-29 12:08:31 +0000
@@ -308,8 +308,8 @@
308 remote_repo.dont_leave_lock_in_place()308 remote_repo.dont_leave_lock_in_place()
309 else:309 else:
310 remote_repo.lock_write()310 remote_repo.lock_write()
311 policy = _mod_bzrdir.UseExistingRepository(remote_repo, final_stack,311 policy = _mod_bzrdir.UseExistingRepository(remote_repo,
312 final_stack_pwd, require_stacking)312 final_stack, final_stack_pwd, require_stacking)
313 policy.acquire_repository()313 policy.acquire_repository()
314 else:314 else:
315 remote_repo = None315 remote_repo = None
316316
=== modified file 'breezy/bzr/workingtree.py'
--- breezy/bzr/workingtree.py 2017-11-19 18:10:24 +0000
+++ breezy/bzr/workingtree.py 2017-11-29 12:08:31 +0000
@@ -32,9 +32,12 @@
3232
33from __future__ import absolute_import33from __future__ import absolute_import
3434
35from bisect import bisect_left
35import breezy36import breezy
36import collections37import collections
37import errno38import errno
39import itertools
40import operator
38import os41import os
39import stat42import stat
40import sys43import sys
@@ -1501,6 +1504,127 @@
1501 subp = osutils.pathjoin(path, subf)1504 subp = osutils.pathjoin(path, subf)
1502 yield subp1505 yield subp
15031506
1507 def walkdirs(self, prefix=""):
1508 """Walk the directories of this tree.
1509
1510 returns a generator which yields items in the form:
1511 ((curren_directory_path, fileid),
1512 [(file1_path, file1_name, file1_kind, (lstat), file1_id,
1513 file1_kind), ... ])
1514
1515 This API returns a generator, which is only valid during the current
1516 tree transaction - within a single lock_read or lock_write duration.
1517
1518 If the tree is not locked, it may cause an error to be raised,
1519 depending on the tree implementation.
1520 """
1521 disk_top = self.abspath(prefix)
1522 if disk_top.endswith('/'):
1523 disk_top = disk_top[:-1]
1524 top_strip_len = len(disk_top) + 1
1525 inventory_iterator = self._walkdirs(prefix)
1526 disk_iterator = osutils.walkdirs(disk_top, prefix)
1527 try:
1528 current_disk = next(disk_iterator)
1529 disk_finished = False
1530 except OSError as e:
1531 if not (e.errno == errno.ENOENT or
1532 (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
1533 raise
1534 current_disk = None
1535 disk_finished = True
1536 try:
1537 current_inv = next(inventory_iterator)
1538 inv_finished = False
1539 except StopIteration:
1540 current_inv = None
1541 inv_finished = True
1542 while not inv_finished or not disk_finished:
1543 if current_disk:
1544 ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
1545 cur_disk_dir_content) = current_disk
1546 else:
1547 ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
1548 cur_disk_dir_content) = ((None, None), None)
1549 if not disk_finished:
1550 # strip out .bzr dirs
1551 if (cur_disk_dir_path_from_top[top_strip_len:] == '' and
1552 len(cur_disk_dir_content) > 0):
1553 # osutils.walkdirs can be made nicer -
1554 # yield the path-from-prefix rather than the pathjoined
1555 # value.
1556 bzrdir_loc = bisect_left(cur_disk_dir_content,
1557 ('.bzr', '.bzr'))
1558 if (bzrdir_loc < len(cur_disk_dir_content)
1559 and self.controldir.is_control_filename(
1560 cur_disk_dir_content[bzrdir_loc][0])):
1561 # we dont yield the contents of, or, .bzr itself.
1562 del cur_disk_dir_content[bzrdir_loc]
1563 if inv_finished:
1564 # everything is unknown
1565 direction = 1
1566 elif disk_finished:
1567 # everything is missing
1568 direction = -1
1569 else:
1570 direction = cmp(current_inv[0][0], cur_disk_dir_relpath)
1571 if direction > 0:
1572 # disk is before inventory - unknown
1573 dirblock = [(relpath, basename, kind, stat, None, None) for
1574 relpath, basename, kind, stat, top_path in
1575 cur_disk_dir_content]
1576 yield (cur_disk_dir_relpath, None), dirblock
1577 try:
1578 current_disk = next(disk_iterator)
1579 except StopIteration:
1580 disk_finished = True
1581 elif direction < 0:
1582 # inventory is before disk - missing.
1583 dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
1584 for relpath, basename, dkind, stat, fileid, kind in
1585 current_inv[1]]
1586 yield (current_inv[0][0], current_inv[0][1]), dirblock
1587 try:
1588 current_inv = next(inventory_iterator)
1589 except StopIteration:
1590 inv_finished = True
1591 else:
1592 # versioned present directory
1593 # merge the inventory and disk data together
1594 dirblock = []
1595 for relpath, subiterator in itertools.groupby(sorted(
1596 current_inv[1] + cur_disk_dir_content,
1597 key=operator.itemgetter(0)), operator.itemgetter(1)):
1598 path_elements = list(subiterator)
1599 if len(path_elements) == 2:
1600 inv_row, disk_row = path_elements
1601 # versioned, present file
1602 dirblock.append((inv_row[0],
1603 inv_row[1], disk_row[2],
1604 disk_row[3], inv_row[4],
1605 inv_row[5]))
1606 elif len(path_elements[0]) == 5:
1607 # unknown disk file
1608 dirblock.append((path_elements[0][0],
1609 path_elements[0][1], path_elements[0][2],
1610 path_elements[0][3], None, None))
1611 elif len(path_elements[0]) == 6:
1612 # versioned, absent file.
1613 dirblock.append((path_elements[0][0],
1614 path_elements[0][1], 'unknown', None,
1615 path_elements[0][4], path_elements[0][5]))
1616 else:
1617 raise NotImplementedError('unreachable code')
1618 yield current_inv[0], dirblock
1619 try:
1620 current_inv = next(inventory_iterator)
1621 except StopIteration:
1622 inv_finished = True
1623 try:
1624 current_disk = next(disk_iterator)
1625 except StopIteration:
1626 disk_finished = True
1627
1504 def _walkdirs(self, prefix=""):1628 def _walkdirs(self, prefix=""):
1505 """Walk the directories of this tree.1629 """Walk the directories of this tree.
15061630
15071631
=== modified file 'breezy/controldir.py'
--- breezy/controldir.py 2017-07-30 21:23:44 +0000
+++ breezy/controldir.py 2017-11-29 12:08:31 +0000
@@ -29,6 +29,7 @@
29import textwrap29import textwrap
3030
31from breezy import (31from breezy import (
32 branch as _mod_branch,
32 hooks,33 hooks,
33 revision as _mod_revision,34 revision as _mod_revision,
34 transport as _mod_transport,35 transport as _mod_transport,
@@ -1454,6 +1455,101 @@
1454 return filename == '.bzr'1455 return filename == '.bzr'
14551456
14561457
1458class RepositoryAcquisitionPolicy(object):
1459 """Abstract base class for repository acquisition policies.
1460
1461 A repository acquisition policy decides how a ControlDir acquires a repository
1462 for a branch that is being created. The most basic policy decision is
1463 whether to create a new repository or use an existing one.
1464 """
1465 def __init__(self, stack_on, stack_on_pwd, require_stacking):
1466 """Constructor.
1467
1468 :param stack_on: A location to stack on
1469 :param stack_on_pwd: If stack_on is relative, the location it is
1470 relative to.
1471 :param require_stacking: If True, it is a failure to not stack.
1472 """
1473 self._stack_on = stack_on
1474 self._stack_on_pwd = stack_on_pwd
1475 self._require_stacking = require_stacking
1476
1477 def configure_branch(self, branch):
1478 """Apply any configuration data from this policy to the branch.
1479
1480 Default implementation sets repository stacking.
1481 """
1482 if self._stack_on is None:
1483 return
1484 if self._stack_on_pwd is None:
1485 stack_on = self._stack_on
1486 else:
1487 try:
1488 stack_on = urlutils.rebase_url(self._stack_on,
1489 self._stack_on_pwd,
1490 branch.user_url)
1491 except urlutils.InvalidRebaseURLs:
1492 stack_on = self._get_full_stack_on()
1493 try:
1494 branch.set_stacked_on_url(stack_on)
1495 except (_mod_branch.UnstackableBranchFormat,
1496 errors.UnstackableRepositoryFormat):
1497 if self._require_stacking:
1498 raise
1499
1500 def requires_stacking(self):
1501 """Return True if this policy requires stacking."""
1502 return self._stack_on is not None and self._require_stacking
1503
1504 def _get_full_stack_on(self):
1505 """Get a fully-qualified URL for the stack_on location."""
1506 if self._stack_on is None:
1507 return None
1508 if self._stack_on_pwd is None:
1509 return self._stack_on
1510 else:
1511 return urlutils.join(self._stack_on_pwd, self._stack_on)
1512
1513 def _add_fallback(self, repository, possible_transports=None):
1514 """Add a fallback to the supplied repository, if stacking is set."""
1515 stack_on = self._get_full_stack_on()
1516 if stack_on is None:
1517 return
1518 try:
1519 stacked_dir = ControlDir.open(
1520 stack_on, possible_transports=possible_transports)
1521 except errors.JailBreak:
1522 # We keep the stacking details, but we are in the server code so
1523 # actually stacking is not needed.
1524 return
1525 try:
1526 stacked_repo = stacked_dir.open_branch().repository
1527 except errors.NotBranchError:
1528 stacked_repo = stacked_dir.open_repository()
1529 try:
1530 repository.add_fallback_repository(stacked_repo)
1531 except errors.UnstackableRepositoryFormat:
1532 if self._require_stacking:
1533 raise
1534 else:
1535 self._require_stacking = True
1536
1537 def acquire_repository(self, make_working_trees=None, shared=False,
1538 possible_transports=None):
1539 """Acquire a repository for this controlrdir.
1540
1541 Implementations may create a new repository or use a pre-exising
1542 repository.
1543
1544 :param make_working_trees: If creating a repository, set
1545 make_working_trees to this value (if non-None)
1546 :param shared: If creating a repository, make it shared if True
1547 :return: A repository, is_new_flag (True if the repository was
1548 created).
1549 """
1550 raise NotImplementedError(RepositoryAcquisitionPolicy.acquire_repository)
1551
1552
1457# Please register new formats after old formats so that formats1553# Please register new formats after old formats so that formats
1458# appear in chronological order and format descriptions can build1554# appear in chronological order and format descriptions can build
1459# on previous ones.1555# on previous ones.
14601556
=== modified file 'breezy/log.py'
--- breezy/log.py 2017-11-12 20:07:32 +0000
+++ breezy/log.py 2017-11-29 12:08:31 +0000
@@ -103,38 +103,38 @@
103 TODO: Perhaps some way to limit this to only particular revisions,103 TODO: Perhaps some way to limit this to only particular revisions,
104 or to traverse a non-mainline set of revisions?104 or to traverse a non-mainline set of revisions?
105 """105 """
106 last_ie = None106 last_verifier = None
107 last_path = None107 last_path = None
108 revno = 1108 revno = 1
109 graph = branch.repository.get_graph()109 graph = branch.repository.get_graph()
110 history = list(graph.iter_lefthand_ancestry(branch.last_revision(),110 history = list(graph.iter_lefthand_ancestry(branch.last_revision(),
111 [_mod_revision.NULL_REVISION]))111 [_mod_revision.NULL_REVISION]))
112 for revision_id in reversed(history):112 for revision_id in reversed(history):
113 this_inv = branch.repository.get_inventory(revision_id)113 this_tree = branch.repository.revision_tree(revision_id)
114 if this_inv.has_id(file_id):114 try:
115 this_ie = this_inv[file_id]115 this_path = this_tree.id2path(file_id)
116 this_path = this_inv.id2path(file_id)116 except errors.NoSuchId:
117 this_verifier = this_path = None
117 else:118 else:
118 this_ie = this_path = None119 this_verifier = this_tree.get_file_verifier(this_path, file_id)
119120
120 # now we know how it was last time, and how it is in this revision.121 # now we know how it was last time, and how it is in this revision.
121 # are those two states effectively the same or not?122 # are those two states effectively the same or not?
122123
123 if not this_ie and not last_ie:124 if not this_verifier and not last_verifier:
124 # not present in either125 # not present in either
125 pass126 pass
126 elif this_ie and not last_ie:127 elif this_verifier and not last_verifier:
127 yield revno, revision_id, "added " + this_path128 yield revno, revision_id, "added " + this_path
128 elif not this_ie and last_ie:129 elif not this_verifier and last_verifier:
129 # deleted here130 # deleted here
130 yield revno, revision_id, "deleted " + last_path131 yield revno, revision_id, "deleted " + last_path
131 elif this_path != last_path:132 elif this_path != last_path:
132 yield revno, revision_id, ("renamed %s => %s" % (last_path, this_path))133 yield revno, revision_id, ("renamed %s => %s" % (last_path, this_path))
133 elif (this_ie.text_size != last_ie.text_size134 elif (this_verifier != last_verifier):
134 or this_ie.text_sha1 != last_ie.text_sha1):
135 yield revno, revision_id, "modified " + this_path135 yield revno, revision_id, "modified " + this_path
136136
137 last_ie = this_ie137 last_verifier = this_verifier
138 last_path = this_path138 last_path = this_path
139 revno += 1139 revno += 1
140140
141141
=== modified file 'breezy/tests/per_branch/test_push.py'
--- breezy/tests/per_branch/test_push.py 2017-11-16 00:39:04 +0000
+++ breezy/tests/per_branch/test_push.py 2017-11-29 12:08:31 +0000
@@ -237,11 +237,10 @@
237 except errors.UninitializableFormat:237 except errors.UninitializableFormat:
238 raise tests.TestNotApplicable('cannot initialize this format')238 raise tests.TestNotApplicable('cannot initialize this format')
239 source.start_series()239 source.start_series()
240 source.build_snapshot(None, [240 revid_a = source.build_snapshot(None, [
241 ('add', ('', 'root-id', 'directory', None))],241 ('add', ('', 'root-id', 'directory', None))])
242 revision_id='A')242 revid_b = source.build_snapshot([revid_a], [])
243 source.build_snapshot(['A'], [], revision_id='B')243 revid_c = source.build_snapshot([revid_a], [])
244 source.build_snapshot(['A'], [], revision_id='C')
245 source.finish_series()244 source.finish_series()
246 b = source.get_branch()245 b = source.get_branch()
247 # Note: We can't read lock the source branch. Some formats take a write246 # Note: We can't read lock the source branch. Some formats take a write
@@ -251,9 +250,9 @@
251 # This means 'push the source branch into this dir'250 # This means 'push the source branch into this dir'
252 bzrdir.push_branch(b)251 bzrdir.push_branch(b)
253 self.addCleanup(repo.lock_read().unlock)252 self.addCleanup(repo.lock_read().unlock)
254 # We should have pushed 'C', but not 'B', since it isn't in the253 # We should have pushed revid_c, but not revid_b, since it isn't in the
255 # ancestry254 # ancestry
256 self.assertEqual(['A', 'C'], sorted(repo.all_revision_ids()))255 self.assertEqual([revid_a, revid_c], sorted(repo.all_revision_ids()))
257256
258 def test_push_with_default_stacking_does_not_create_broken_branch(self):257 def test_push_with_default_stacking_does_not_create_broken_branch(self):
259 """Pushing a new standalone branch works even when there's a default258 """Pushing a new standalone branch works even when there's a default
@@ -278,24 +277,22 @@
278 repo = self.make_repository('repo', shared=True, format='1.6')277 repo = self.make_repository('repo', shared=True, format='1.6')
279 builder = self.make_branch_builder('repo/local')278 builder = self.make_branch_builder('repo/local')
280 builder.start_series()279 builder.start_series()
281 builder.build_snapshot(None, [280 revid1 = builder.build_snapshot(None, [
282 ('add', ('', 'root-id', 'directory', '')),281 ('add', ('', 'root-id', 'directory', '')),
283 ('add', ('filename', 'f-id', 'file', 'content\n'))],282 ('add', ('filename', 'f-id', 'file', 'content\n'))])
284 revision_id='rev-1',)283 revid2 = builder.build_snapshot([revid1], [])
285 builder.build_snapshot(['rev-1'], [], revision_id='rev-2')284 revid3 = builder.build_snapshot([revid2],
286 builder.build_snapshot(['rev-2'],285 [('modify', ('f-id', 'new-content\n'))])
287 [('modify', ('f-id', 'new-content\n'))],
288 revision_id='rev-3')
289 builder.finish_series()286 builder.finish_series()
290 trunk = builder.get_branch()287 trunk = builder.get_branch()
291 # Sprout rev-1 to "trunk", so that we can stack on it.288 # Sprout rev-1 to "trunk", so that we can stack on it.
292 trunk.controldir.sprout(self.get_url('trunk'), revision_id='rev-1')289 trunk.controldir.sprout(self.get_url('trunk'), revision_id=revid1)
293 # Set a default stacking policy so that new branches will automatically290 # Set a default stacking policy so that new branches will automatically
294 # stack on trunk.291 # stack on trunk.
295 self.make_controldir('.').get_config().set_default_stack_on('trunk')292 self.make_controldir('.').get_config().set_default_stack_on('trunk')
296 # Push rev-2 to a new branch "remote". It will be stacked on "trunk".293 # Push rev-2 to a new branch "remote". It will be stacked on "trunk".
297 output = BytesIO()294 output = BytesIO()
298 push._show_push_branch(trunk, 'rev-2', self.get_url('remote'), output)295 push._show_push_branch(trunk, revid2, self.get_url('remote'), output)
299 # Push rev-3 onto "remote". If "remote" not stacked and is missing the296 # Push rev-3 onto "remote". If "remote" not stacked and is missing the
300 # fulltext record for f-id @ rev-1, then this will fail.297 # fulltext record for f-id @ rev-1, then this will fail.
301 remote_branch = branch.Branch.open(self.get_url('remote'))298 remote_branch = branch.Branch.open(self.get_url('remote'))
302299
=== modified file 'breezy/tests/per_branch/test_tags.py'
--- breezy/tests/per_branch/test_tags.py 2017-11-21 20:09:04 +0000
+++ breezy/tests/per_branch/test_tags.py 2017-11-29 12:08:31 +0000
@@ -116,29 +116,31 @@
116 self.fail("didn't get expected exception")116 self.fail("didn't get expected exception")
117117
118 def test_merge_tags(self):118 def test_merge_tags(self):
119 b1 = self.make_branch_with_revisions('b1', ['revid', 'revid-1'])119 b1, [revid, revid1] = self.make_branch_with_revision_tuple('b1', 2)
120 b2 = self.make_branch_with_revisions('b2', ['revid', 'revid-2'])120 w2 = b1.controldir.sprout('b2', revision_id=revid).open_workingtree()
121 revid2 = w2.commit('revision 2')
122 b2 = w2.branch
121 # if there are tags in the source and not the destination, then they123 # if there are tags in the source and not the destination, then they
122 # just go across124 # just go across
123 b1.tags.set_tag('tagname', 'revid')125 b1.tags.set_tag('tagname', revid)
124 b1.tags.merge_to(b2.tags)126 b1.tags.merge_to(b2.tags)
125 self.assertEqual(b2.tags.lookup_tag('tagname'), 'revid')127 self.assertEqual(b2.tags.lookup_tag('tagname'), revid)
126 # if a tag is in the destination and not in the source, it is not128 # if a tag is in the destination and not in the source, it is not
127 # removed when we merge them129 # removed when we merge them
128 b2.tags.set_tag('in-destination', 'revid')130 b2.tags.set_tag('in-destination', revid)
129 updates, conflicts = b1.tags.merge_to(b2.tags)131 updates, conflicts = b1.tags.merge_to(b2.tags)
130 self.assertEqual(list(conflicts), [])132 self.assertEqual(list(conflicts), [])
131 self.assertEqual(updates, {})133 self.assertEqual(updates, {})
132 self.assertEqual(b2.tags.lookup_tag('in-destination'), 'revid')134 self.assertEqual(b2.tags.lookup_tag('in-destination'), revid)
133 # if there's a conflicting tag, it's reported -- the command line135 # if there's a conflicting tag, it's reported -- the command line
134 # interface will say "these tags couldn't be copied"136 # interface will say "these tags couldn't be copied"
135 b1.tags.set_tag('conflicts', 'revid-1')137 b1.tags.set_tag('conflicts', revid1)
136 b2.tags.set_tag('conflicts', 'revid-2')138 b2.tags.set_tag('conflicts', revid2)
137 updates, conflicts = b1.tags.merge_to(b2.tags)139 updates, conflicts = b1.tags.merge_to(b2.tags)
138 self.assertEqual(list(conflicts), [('conflicts', 'revid-1', 'revid-2')])140 self.assertEqual(list(conflicts), [('conflicts', revid1, revid2)])
139 # and it keeps the same value141 # and it keeps the same value
140 self.assertEqual(updates, {})142 self.assertEqual(updates, {})
141 self.assertEqual(b2.tags.lookup_tag('conflicts'), 'revid-2')143 self.assertEqual(b2.tags.lookup_tag('conflicts'), revid2)
142144
143 def test_unicode_tag(self):145 def test_unicode_tag(self):
144 tag_name = u'\u3070'146 tag_name = u'\u3070'
@@ -238,7 +240,7 @@
238240
239 def test_merge_to_invalides_cache(self):241 def test_merge_to_invalides_cache(self):
240 b1, revids = self.make_write_locked_branch_with_one_tag()242 b1, revids = self.make_write_locked_branch_with_one_tag()
241 b2 = self.make_branch_with_revisions('b2', [revids[1], revids[0]])243 b2 = b1.controldir.sprout('b2').open_branch()
242 b2.tags.set_tag('two', revids[1])244 b2.tags.set_tag('two', revids[1])
243 b2.tags.merge_to(b1.tags)245 b2.tags.merge_to(b1.tags)
244 self.assertEqual(246 self.assertEqual(
245247
=== modified file 'breezy/tests/per_controldir/test_controldir.py'
--- breezy/tests/per_controldir/test_controldir.py 2017-08-10 01:21:20 +0000
+++ breezy/tests/per_controldir/test_controldir.py 2017-11-29 12:08:31 +0000
@@ -314,6 +314,8 @@
314 tree.add('foo')314 tree.add('foo')
315 rev1 = tree.commit('revision 1')315 rev1 = tree.commit('revision 1')
316 tree_repo = tree.branch.repository316 tree_repo = tree.branch.repository
317 if not tree_repo._format.supports_revision_signatures:
318 self.skipTest('repository format does not support signing')
317 tree_repo.lock_write()319 tree_repo.lock_write()
318 tree_repo.start_write_group()320 tree_repo.start_write_group()
319 tree_repo.sign_revision(rev1, gpg.LoopbackGPGStrategy(None))321 tree_repo.sign_revision(rev1, gpg.LoopbackGPGStrategy(None))
320322
=== modified file 'breezy/tests/per_intertree/test_compare.py'
--- breezy/tests/per_intertree/test_compare.py 2017-11-12 20:44:54 +0000
+++ breezy/tests/per_intertree/test_compare.py 2017-11-29 12:08:31 +0000
@@ -130,9 +130,9 @@
130 d = self.intertree_class(tree1, tree2).compare()130 d = self.intertree_class(tree1, tree2).compare()
131 self.assertEqual([], d.added)131 self.assertEqual([], d.added)
132 self.assertEqual([], d.modified)132 self.assertEqual([], d.modified)
133 self.assertEqual([('a', 'a-id', 'file'),133 self.assertEqual([('a', tree1.path2id('a'), 'file'),
134 ('b', 'b-id', 'directory'),134 ('b', tree1.path2id('b'), 'directory'),
135 ('b/c', 'c-id', 'file'),135 ('b/c', tree1.path2id('b/c'), 'file'),
136 ], d.removed)136 ], d.removed)
137 self.assertEqual([], d.renamed)137 self.assertEqual([], d.renamed)
138 self.assertEqual([], d.unchanged)138 self.assertEqual([], d.unchanged)
@@ -146,7 +146,7 @@
146 tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2)146 tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2)
147 d = self.intertree_class(tree1, tree2).compare()147 d = self.intertree_class(tree1, tree2).compare()
148 self.assertEqual([], d.added)148 self.assertEqual([], d.added)
149 self.assertEqual([('a', 'a-id', 'file', True, False)], d.modified)149 self.assertEqual([('a', tree1.path2id('a'), 'file', True, False)], d.modified)
150 self.assertEqual([], d.removed)150 self.assertEqual([], d.removed)
151 self.assertEqual([], d.renamed)151 self.assertEqual([], d.renamed)
152 self.assertEqual([], d.unchanged)152 self.assertEqual([], d.unchanged)
@@ -160,7 +160,7 @@
160 tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2)160 tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2)
161 d = self.intertree_class(tree1, tree2).compare()161 d = self.intertree_class(tree1, tree2).compare()
162 self.assertEqual([], d.added)162 self.assertEqual([], d.added)
163 self.assertEqual([('b/c', 'c-id', 'file', False, True)], d.modified)163 self.assertEqual([('b/c', tree1.path2id('b/c'), 'file', False, True)], d.modified)
164 self.assertEqual([], d.removed)164 self.assertEqual([], d.removed)
165 self.assertEqual([], d.renamed)165 self.assertEqual([], d.renamed)
166 self.assertEqual([], d.unchanged)166 self.assertEqual([], d.unchanged)
@@ -176,7 +176,7 @@
176 self.assertEqual([], d.added)176 self.assertEqual([], d.added)
177 self.assertEqual([], d.modified)177 self.assertEqual([], d.modified)
178 self.assertEqual([], d.removed)178 self.assertEqual([], d.removed)
179 self.assertEqual([('a', 'd', 'a-id', 'file', False, False)], d.renamed)179 self.assertEqual([('a', 'd', tree1.path2id('a'), 'file', False, False)], d.renamed)
180 self.assertEqual([], d.unchanged)180 self.assertEqual([], d.unchanged)
181181
182 def test_file_rename_and_modification(self):182 def test_file_rename_and_modification(self):
@@ -190,7 +190,7 @@
190 self.assertEqual([], d.added)190 self.assertEqual([], d.added)
191 self.assertEqual([], d.modified)191 self.assertEqual([], d.modified)
192 self.assertEqual([], d.removed)192 self.assertEqual([], d.removed)
193 self.assertEqual([('a', 'd', 'a-id', 'file', True, False)], d.renamed)193 self.assertEqual([('a', 'd', tree1.path2id('a'), 'file', True, False)], d.renamed)
194 self.assertEqual([], d.unchanged)194 self.assertEqual([], d.unchanged)
195195
196 def test_file_rename_and_meta_modification(self):196 def test_file_rename_and_meta_modification(self):
@@ -204,7 +204,7 @@
204 self.assertEqual([], d.added)204 self.assertEqual([], d.added)
205 self.assertEqual([], d.modified)205 self.assertEqual([], d.modified)
206 self.assertEqual([], d.removed)206 self.assertEqual([], d.removed)
207 self.assertEqual([('b/c', 'e', 'c-id', 'file', False, True)], d.renamed)207 self.assertEqual([('b/c', 'e', tree1.path2id('b/c'), 'file', False, True)], d.renamed)
208 self.assertEqual([], d.unchanged)208 self.assertEqual([], d.unchanged)
209209
210 def test_empty_to_abc_content_a_only(self):210 def test_empty_to_abc_content_a_only(self):
@@ -215,7 +215,7 @@
215 tree2 = self.get_tree_no_parents_abc_content(tree2)215 tree2 = self.get_tree_no_parents_abc_content(tree2)
216 tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2)216 tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2)
217 d = self.intertree_class(tree1, tree2).compare(specific_files=['a'])217 d = self.intertree_class(tree1, tree2).compare(specific_files=['a'])
218 self.assertEqual([('a', 'a-id', 'file')], d.added)218 self.assertEqual([('a', tree2.path2id('a'), 'file')], d.added)
219 self.assertEqual([], d.modified)219 self.assertEqual([], d.modified)
220 self.assertEqual([], d.removed)220 self.assertEqual([], d.removed)
221 self.assertEqual([], d.renamed)221 self.assertEqual([], d.renamed)
@@ -230,8 +230,9 @@
230 d = self.intertree_class(tree1, tree2).compare(230 d = self.intertree_class(tree1, tree2).compare(
231 specific_files=['a', 'b/c'])231 specific_files=['a', 'b/c'])
232 self.assertEqual(232 self.assertEqual(
233 [('a', 'a-id', 'file'), (u'b', 'b-id', 'directory'),233 [('a', tree2.path2id('a'), 'file'),
234 ('b/c', 'c-id', 'file')],234 (u'b', tree2.path2id('b'), 'directory'),
235 ('b/c', tree2.path2id('b/c'), 'file')],
235 d.added)236 d.added)
236 self.assertEqual([], d.modified)237 self.assertEqual([], d.modified)
237 self.assertEqual([], d.removed)238 self.assertEqual([], d.removed)
@@ -765,7 +766,7 @@
765 tree2 = self.get_tree_no_parents_abc_content_4(tree2)766 tree2 = self.get_tree_no_parents_abc_content_4(tree2)
766 tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2)767 tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2)
767 root_id = tree1.path2id('')768 root_id = tree1.path2id('')
768 self.assertEqual([('a-id', ('a', 'd'), False, (True, True),769 self.assertEqual([(tree1.path2id('a'), ('a', 'd'), False, (True, True),
769 (root_id, root_id), ('a', 'd'), ('file', 'file'),770 (root_id, root_id), ('a', 'd'), ('file', 'file'),
770 (False, False))],771 (False, False))],
771 self.do_iter_changes(tree1, tree2))772 self.do_iter_changes(tree1, tree2))
@@ -861,7 +862,7 @@
861 # d is new, d/e is b-id renamed, d/e/a is a-id renamed 862 # d is new, d/e is b-id renamed, d/e/a is a-id renamed
862 root_id = tree1.path2id('')863 root_id = tree1.path2id('')
863 self.assertEqualIterChanges(864 self.assertEqualIterChanges(
864 [self.renamed(tree1, tree2, 'b-id', False),865 [self.renamed(tree1, tree2, tree1.path2id('b'), False),
865 self.added(tree2, 'd-id'),866 self.added(tree2, 'd-id'),
866 self.renamed(tree1, tree2, 'a-id', False)],867 self.renamed(tree1, tree2, 'a-id', False)],
867 self.do_iter_changes(tree1, tree2, specific_files=['d/e/a']))868 self.do_iter_changes(tree1, tree2, specific_files=['d/e/a']))
868869
=== modified file 'breezy/tests/per_repository/test_fetch.py'
--- breezy/tests/per_repository/test_fetch.py 2017-11-19 19:00:31 +0000
+++ breezy/tests/per_repository/test_fetch.py 2017-11-29 12:08:31 +0000
@@ -320,27 +320,26 @@
320 def make_simple_branch_with_ghost(self):320 def make_simple_branch_with_ghost(self):
321 builder = self.make_branch_builder('source')321 builder = self.make_branch_builder('source')
322 builder.start_series()322 builder.start_series()
323 builder.build_snapshot(None, [323 a_revid = builder.build_snapshot(None, [
324 ('add', ('', 'root-id', 'directory', None)),324 ('add', ('', 'root-id', 'directory', None)),
325 ('add', ('file', 'file-id', 'file', 'content\n'))],325 ('add', ('file', 'file-id', 'file', 'content\n'))])
326 revision_id='A-id')326 b_revid = builder.build_snapshot([a_revid, 'ghost-id'], [])
327 builder.build_snapshot(['A-id', 'ghost-id'], [], revision_id='B-id')
328 builder.finish_series()327 builder.finish_series()
329 source_b = builder.get_branch()328 source_b = builder.get_branch()
330 source_b.lock_read()329 source_b.lock_read()
331 self.addCleanup(source_b.unlock)330 self.addCleanup(source_b.unlock)
332 return source_b331 return source_b, b_revid
333332
334 def test_fetch_with_ghost(self):333 def test_fetch_with_ghost(self):
335 source_b = self.make_simple_branch_with_ghost()334 source_b, b_revid = self.make_simple_branch_with_ghost()
336 target = self.make_repository('target')335 target = self.make_repository('target')
337 target.lock_write()336 target.lock_write()
338 self.addCleanup(target.unlock)337 self.addCleanup(target.unlock)
339 target.fetch(source_b.repository, revision_id='B-id')338 target.fetch(source_b.repository, revision_id=b_revid)
340339
341 def test_fetch_into_smart_with_ghost(self):340 def test_fetch_into_smart_with_ghost(self):
342 trans = self.make_smart_server('target')341 trans = self.make_smart_server('target')
343 source_b = self.make_simple_branch_with_ghost()342 source_b, b_revid = self.make_simple_branch_with_ghost()
344 if not source_b.controldir._format.supports_transport(trans):343 if not source_b.controldir._format.supports_transport(trans):
345 raise TestNotApplicable("format does not support transport")344 raise TestNotApplicable("format does not support transport")
346 target = self.make_repository('target')345 target = self.make_repository('target')
@@ -349,7 +348,7 @@
349 target.lock_write()348 target.lock_write()
350 self.addCleanup(target.unlock)349 self.addCleanup(target.unlock)
351 try:350 try:
352 target.fetch(source_b.repository, revision_id='B-id')351 target.fetch(source_b.repository, revision_id=b_revid)
353 except errors.TokenLockingNotSupported:352 except errors.TokenLockingNotSupported:
354 # The code inside fetch() that tries to lock and then fails, also353 # The code inside fetch() that tries to lock and then fails, also
355 # causes weird problems with 'lock_not_held' later on...354 # causes weird problems with 'lock_not_held' later on...
@@ -359,7 +358,7 @@
359358
360 def test_fetch_from_smart_with_ghost(self):359 def test_fetch_from_smart_with_ghost(self):
361 trans = self.make_smart_server('source')360 trans = self.make_smart_server('source')
362 source_b = self.make_simple_branch_with_ghost()361 source_b, b_revid = self.make_simple_branch_with_ghost()
363 if not source_b.controldir._format.supports_transport(trans):362 if not source_b.controldir._format.supports_transport(trans):
364 raise TestNotApplicable("format does not support transport")363 raise TestNotApplicable("format does not support transport")
365 target = self.make_repository('target')364 target = self.make_repository('target')
@@ -369,5 +368,5 @@
369 source = repository.Repository.open(trans.base)368 source = repository.Repository.open(trans.base)
370 source.lock_read()369 source.lock_read()
371 self.addCleanup(source.unlock)370 self.addCleanup(source.unlock)
372 target.fetch(source, revision_id='B-id')371 target.fetch(source, revision_id=b_revid)
373372
374373
=== modified file 'breezy/tests/per_repository/test_repository.py'
--- breezy/tests/per_repository/test_repository.py 2017-11-21 00:38:51 +0000
+++ breezy/tests/per_repository/test_repository.py 2017-11-29 12:08:31 +0000
@@ -69,7 +69,7 @@
69 def assertFormatAttribute(self, attribute, allowed_values):69 def assertFormatAttribute(self, attribute, allowed_values):
70 """Assert that the format has an attribute 'attribute'."""70 """Assert that the format has an attribute 'attribute'."""
71 repo = self.make_repository('repo')71 repo = self.make_repository('repo')
72 self.assertSubset([getattr(repo._format, attribute)], allowed_values)72 self.assertIn(getattr(repo._format, attribute), allowed_values)
7373
74 def test_attribute_fast_deltas(self):74 def test_attribute_fast_deltas(self):
75 """Test the format.fast_deltas attribute."""75 """Test the format.fast_deltas attribute."""
@@ -118,6 +118,7 @@
118 self.assertFormatAttribute('supports_setting_revision_ids',118 self.assertFormatAttribute('supports_setting_revision_ids',
119 (True, False))119 (True, False))
120120
121<<<<<<< TREE
121 def test_attribute_format_supports_storing_branch_nick(self):122 def test_attribute_format_supports_storing_branch_nick(self):
122 self.assertFormatAttribute('supports_storing_branch_nick',123 self.assertFormatAttribute('supports_storing_branch_nick',
123 (True, False))124 (True, False))
@@ -137,13 +138,34 @@
137 self.assertRaises(TypeError, repo._format.open,138 self.assertRaises(TypeError, repo._format.open,
138 repo.controldir, _override_transport=backup_transport)139 repo.controldir, _override_transport=backup_transport)
139140
141=======
142 def test_attribute_format_supports_storing_branch_nick(self):
143 self.assertFormatAttribute('supports_storing_branch_nick',
144 (True, False))
145
146 def test_attribute_format_supports_overriding_transport(self):
147 repo = self.make_repository('repo')
148 self.assertIn(repo._format.supports_overriding_transport, (True, False))
149
150 repo.control_transport.copy_tree('.', '../repository.backup')
151 backup_transport = repo.control_transport.clone('../repository.backup')
152 if repo._format.supports_overriding_transport:
153 backup = repo._format.open(
154 repo.controldir,
155 _override_transport=backup_transport)
156 self.assertIs(backup_transport, backup.control_transport)
157 else:
158 self.assertRaises(TypeError, repo._format.open,
159 repo.controldir, _override_transport=backup_transport)
160
161>>>>>>> MERGE-SOURCE
140 def test_format_is_deprecated(self):162 def test_format_is_deprecated(self):
141 repo = self.make_repository('repo')163 repo = self.make_repository('repo')
142 self.assertSubset([repo._format.is_deprecated()], (True, False))164 self.assertIn(repo._format.is_deprecated(), (True, False))
143165
144 def test_format_is_supported(self):166 def test_format_is_supported(self):
145 repo = self.make_repository('repo')167 repo = self.make_repository('repo')
146 self.assertSubset([repo._format.is_supported()], (True, False))168 self.assertIn(repo._format.is_supported(), (True, False))
147169
148 def test_clone_to_default_format(self):170 def test_clone_to_default_format(self):
149 #TODO: Test that cloning a repository preserves all the information171 #TODO: Test that cloning a repository preserves all the information
@@ -387,8 +409,7 @@
387409
388 def test_format_supports_external_lookups(self):410 def test_format_supports_external_lookups(self):
389 repo = self.make_repository('.')411 repo = self.make_repository('.')
390 self.assertSubset(412 self.assertIn(repo._format.supports_external_lookups, (True, False))
391 [repo._format.supports_external_lookups], (True, False))
392413
393 def assertMessageRoundtrips(self, message):414 def assertMessageRoundtrips(self, message):
394 """Assert that message roundtrips to a repository and back intact."""415 """Assert that message roundtrips to a repository and back intact."""
395416
=== modified file 'breezy/tests/per_tree/test_annotate_iter.py'
--- breezy/tests/per_tree/test_annotate_iter.py 2017-11-12 13:09:58 +0000
+++ breezy/tests/per_tree/test_annotate_iter.py 2017-11-29 12:08:31 +0000
@@ -32,6 +32,8 @@
3232
33 def get_tree_with_ghost(self):33 def get_tree_with_ghost(self):
34 tree = self.make_branch_and_tree('tree')34 tree = self.make_branch_and_tree('tree')
35 if not tree.branch.repository._format.supports_ghosts:
36 self.skipTest('repository format does not support ghosts')
35 self.build_tree_contents([('tree/one', 'first\ncontent\n')])37 self.build_tree_contents([('tree/one', 'first\ncontent\n')])
36 tree.add(['one'])38 tree.add(['one'])
37 rev_1 = tree.commit('one')39 rev_1 = tree.commit('one')
3840
=== modified file 'breezy/tests/per_workingtree/test_add.py'
--- breezy/tests/per_workingtree/test_add.py 2017-11-19 18:57:33 +0000
+++ breezy/tests/per_workingtree/test_add.py 2017-11-29 12:08:31 +0000
@@ -58,6 +58,8 @@
58 def test_add_old_id(self):58 def test_add_old_id(self):
59 """We can add an old id, as long as it doesn't exist now."""59 """We can add an old id, as long as it doesn't exist now."""
60 tree = self.make_branch_and_tree('.')60 tree = self.make_branch_and_tree('.')
61 if not tree.supports_setting_file_ids():
62 self.skipTest("tree does not support setting file ids")
61 self.build_tree(['a', 'b'])63 self.build_tree(['a', 'b'])
62 tree.add(['a'])64 tree.add(['a'])
63 file_id = tree.path2id('a')65 file_id = tree.path2id('a')
6466
=== modified file 'breezy/tests/per_workingtree/test_annotate_iter.py'
--- breezy/tests/per_workingtree/test_annotate_iter.py 2017-11-19 19:00:31 +0000
+++ breezy/tests/per_workingtree/test_annotate_iter.py 2017-11-29 12:08:31 +0000
@@ -23,159 +23,159 @@
2323
24 def make_single_rev_tree(self):24 def make_single_rev_tree(self):
25 builder = self.make_branch_builder('branch')25 builder = self.make_branch_builder('branch')
26 builder.build_snapshot(None, [26 revid = builder.build_snapshot(None, [
27 ('add', ('', 'TREE_ROOT', 'directory', None)),27 ('add', ('', 'TREE_ROOT', 'directory', None)),
28 ('add', ('file', 'file-id', 'file', 'initial content\n')),28 ('add', ('file', 'file-id', 'file', 'initial content\n')),
29 ], revision_id='rev-1')29 ])
30 b = builder.get_branch()30 b = builder.get_branch()
31 tree = b.create_checkout('tree', lightweight=True)31 tree = b.create_checkout('tree', lightweight=True)
32 tree.lock_read()32 tree.lock_read()
33 self.addCleanup(tree.unlock)33 self.addCleanup(tree.unlock)
34 return tree34 return tree, revid
3535
36 def test_annotate_same_as_parent(self):36 def test_annotate_same_as_parent(self):
37 tree = self.make_single_rev_tree()37 tree, revid = self.make_single_rev_tree()
38 annotations = tree.annotate_iter('file')38 annotations = tree.annotate_iter('file')
39 self.assertEqual([('rev-1', 'initial content\n')],39 self.assertEqual([(revid, 'initial content\n')],
40 annotations)40 annotations)
4141
42 def test_annotate_mod_from_parent(self):42 def test_annotate_mod_from_parent(self):
43 tree = self.make_single_rev_tree()43 tree, revid = self.make_single_rev_tree()
44 self.build_tree_contents([('tree/file',44 self.build_tree_contents([('tree/file',
45 'initial content\nnew content\n')])45 'initial content\nnew content\n')])
46 annotations = tree.annotate_iter('file')46 annotations = tree.annotate_iter('file')
47 self.assertEqual([('rev-1', 'initial content\n'),47 self.assertEqual([(revid, 'initial content\n'),
48 ('current:', 'new content\n'),48 ('current:', 'new content\n'),
49 ], annotations)49 ], annotations)
5050
51 def test_annotate_merge_parents(self):51 def test_annotate_merge_parents(self):
52 builder = self.make_branch_builder('branch')52 builder = self.make_branch_builder('branch')
53 builder.start_series()53 builder.start_series()
54 builder.build_snapshot(None, [54 revid1 = builder.build_snapshot(None, [
55 ('add', ('', 'TREE_ROOT', 'directory', None)),55 ('add', ('', 'TREE_ROOT', 'directory', None)),
56 ('add', ('file', 'file-id', 'file', 'initial content\n')),56 ('add', ('file', 'file-id', 'file', 'initial content\n')),
57 ], revision_id='rev-1')57 ])
58 builder.build_snapshot(['rev-1'], [58 revid2 = builder.build_snapshot([revid1], [
59 ('modify', ('file-id', 'initial content\ncontent in 2\n')),59 ('modify', ('file-id', 'initial content\ncontent in 2\n')),
60 ], revision_id='rev-2')60 ])
61 builder.build_snapshot(['rev-1'], [61 revid3 = builder.build_snapshot([revid1], [
62 ('modify', ('file-id', 'initial content\ncontent in 3\n')),62 ('modify', ('file-id', 'initial content\ncontent in 3\n')),
63 ], revision_id='rev-3')63 ])
64 builder.finish_series()64 builder.finish_series()
65 b = builder.get_branch()65 b = builder.get_branch()
66 tree = b.create_checkout('tree', revision_id='rev-2', lightweight=True)66 tree = b.create_checkout('tree', revision_id=revid2, lightweight=True)
67 tree.lock_write()67 tree.lock_write()
68 self.addCleanup(tree.unlock)68 self.addCleanup(tree.unlock)
69 tree.set_parent_ids(['rev-2', 'rev-3'])69 tree.set_parent_ids([revid2, revid3])
70 self.build_tree_contents([('tree/file',70 self.build_tree_contents([('tree/file',
71 'initial content\ncontent in 2\n'71 'initial content\ncontent in 2\n'
72 'content in 3\nnew content\n')])72 'content in 3\nnew content\n')])
73 annotations = tree.annotate_iter('file')73 annotations = tree.annotate_iter('file')
74 self.assertEqual([('rev-1', 'initial content\n'),74 self.assertEqual([(revid1, 'initial content\n'),
75 ('rev-2', 'content in 2\n'),75 (revid2, 'content in 2\n'),
76 ('rev-3', 'content in 3\n'),76 (revid3, 'content in 3\n'),
77 ('current:', 'new content\n'),77 ('current:', 'new content\n'),
78 ], annotations)78 ], annotations)
7979
80 def test_annotate_merge_parent_no_file(self):80 def test_annotate_merge_parent_no_file(self):
81 builder = self.make_branch_builder('branch')81 builder = self.make_branch_builder('branch')
82 builder.start_series()82 builder.start_series()
83 builder.build_snapshot(None, [83 revid1 = builder.build_snapshot(None, [
84 ('add', ('', 'TREE_ROOT', 'directory', None)),84 ('add', ('', 'TREE_ROOT', 'directory', None)),
85 ], revision_id='rev-1')85 ])
86 builder.build_snapshot(['rev-1'], [86 revid2 = builder.build_snapshot([revid1], [
87 ('add', ('file', 'file-id', 'file', 'initial content\n')),87 ('add', ('file', 'file-id', 'file', 'initial content\n')),
88 ], revision_id='rev-2')88 ])
89 builder.build_snapshot(['rev-1'], [], revision_id='rev-3')89 revid3 = builder.build_snapshot([revid1], [])
90 builder.finish_series()90 builder.finish_series()
91 b = builder.get_branch()91 b = builder.get_branch()
92 tree = b.create_checkout('tree', revision_id='rev-2', lightweight=True)92 tree = b.create_checkout('tree', revision_id=revid2, lightweight=True)
93 tree.lock_write()93 tree.lock_write()
94 self.addCleanup(tree.unlock)94 self.addCleanup(tree.unlock)
95 tree.set_parent_ids(['rev-2', 'rev-3'])95 tree.set_parent_ids([revid2, revid3])
96 self.build_tree_contents([('tree/file',96 self.build_tree_contents([('tree/file',
97 'initial content\nnew content\n')])97 'initial content\nnew content\n')])
98 annotations = tree.annotate_iter('file')98 annotations = tree.annotate_iter('file')
99 self.assertEqual([('rev-2', 'initial content\n'),99 self.assertEqual([(revid2, 'initial content\n'),
100 ('current:', 'new content\n'),100 ('current:', 'new content\n'),
101 ], annotations)101 ], annotations)
102102
103 def test_annotate_merge_parent_was_directory(self):103 def test_annotate_merge_parent_was_directory(self):
104 builder = self.make_branch_builder('branch')104 builder = self.make_branch_builder('branch')
105 builder.start_series()105 builder.start_series()
106 builder.build_snapshot(None, [106 revid1 = builder.build_snapshot(None, [
107 ('add', ('', 'TREE_ROOT', 'directory', None)),107 ('add', ('', 'TREE_ROOT', 'directory', None)),
108 ], revision_id='rev-1')108 ])
109 builder.build_snapshot(['rev-1'], [109 revid2 = builder.build_snapshot([revid1], [
110 ('add', ('file', 'file-id', 'file', 'initial content\n')),110 ('add', ('file', 'file-id', 'file', 'initial content\n')),
111 ], revision_id='rev-2')111 ])
112 builder.build_snapshot(['rev-1'], [112 revid3 = builder.build_snapshot([revid1], [
113 ('add', ('a_dir', 'file-id', 'directory', None)),113 ('add', ('a_dir', 'file-id', 'directory', None)),
114 ], revision_id='rev-3')114 ])
115 builder.finish_series()115 builder.finish_series()
116 b = builder.get_branch()116 b = builder.get_branch()
117 tree = b.create_checkout('tree', revision_id='rev-2', lightweight=True)117 tree = b.create_checkout('tree', revision_id=revid2, lightweight=True)
118 tree.lock_write()118 tree.lock_write()
119 self.addCleanup(tree.unlock)119 self.addCleanup(tree.unlock)
120 tree.set_parent_ids(['rev-2', 'rev-3'])120 tree.set_parent_ids([revid2, revid3])
121 self.build_tree_contents([('tree/file',121 self.build_tree_contents([('tree/file',
122 'initial content\nnew content\n')])122 'initial content\nnew content\n')])
123 annotations = tree.annotate_iter('file')123 annotations = tree.annotate_iter('file')
124 self.assertEqual([('rev-2', 'initial content\n'),124 self.assertEqual([(revid2, 'initial content\n'),
125 ('current:', 'new content\n'),125 ('current:', 'new content\n'),
126 ], annotations)126 ], annotations)
127127
128 def test_annotate_same_as_merge_parent(self):128 def test_annotate_same_as_merge_parent(self):
129 builder = self.make_branch_builder('branch')129 builder = self.make_branch_builder('branch')
130 builder.start_series()130 builder.start_series()
131 builder.build_snapshot(None, [131 revid1 = builder.build_snapshot(None, [
132 ('add', ('', 'TREE_ROOT', 'directory', None)),132 ('add', ('', 'TREE_ROOT', 'directory', None)),
133 ('add', ('file', 'file-id', 'file', 'initial content\n')),133 ('add', ('file', 'file-id', 'file', 'initial content\n')),
134 ], revision_id='rev-1')134 ])
135 builder.build_snapshot(['rev-1'], [135 revid2 = builder.build_snapshot([revid1], [
136 ], revision_id='rev-2')136 ])
137 builder.build_snapshot(['rev-1'], [137 revid3 = builder.build_snapshot([revid1], [
138 ('modify', ('file-id', 'initial content\ncontent in 3\n')),138 ('modify', ('file-id', 'initial content\ncontent in 3\n')),
139 ], revision_id='rev-3')139 ])
140 builder.finish_series()140 builder.finish_series()
141 b = builder.get_branch()141 b = builder.get_branch()
142 tree = b.create_checkout('tree', revision_id='rev-2', lightweight=True)142 tree = b.create_checkout('tree', revision_id=revid2, lightweight=True)
143 tree.lock_write()143 tree.lock_write()
144 self.addCleanup(tree.unlock)144 self.addCleanup(tree.unlock)
145 tree.set_parent_ids(['rev-2', 'rev-3'])145 tree.set_parent_ids([revid2, revid3])
146 self.build_tree_contents([('tree/file',146 self.build_tree_contents([('tree/file',
147 'initial content\ncontent in 3\n')])147 'initial content\ncontent in 3\n')])
148 annotations = tree.annotate_iter('file')148 annotations = tree.annotate_iter('file')
149 self.assertEqual([('rev-1', 'initial content\n'),149 self.assertEqual([(revid1, 'initial content\n'),
150 ('rev-3', 'content in 3\n'),150 (revid3, 'content in 3\n'),
151 ], annotations)151 ], annotations)
152152
153 def test_annotate_same_as_merge_parent_supersedes(self):153 def test_annotate_same_as_merge_parent_supersedes(self):
154 builder = self.make_branch_builder('branch')154 builder = self.make_branch_builder('branch')
155 builder.start_series()155 builder.start_series()
156 builder.build_snapshot(None, [156 revid1 = builder.build_snapshot(None, [
157 ('add', ('', 'TREE_ROOT', 'directory', None)),157 ('add', ('', 'TREE_ROOT', 'directory', None)),
158 ('add', ('file', 'file-id', 'file', 'initial content\n')),158 ('add', ('file', 'file-id', 'file', 'initial content\n')),
159 ], revision_id='rev-1')159 ])
160 builder.build_snapshot(['rev-1'], [160 revid2 = builder.build_snapshot([revid1], [
161 ('modify', ('file-id', 'initial content\nnew content\n')),161 ('modify', ('file-id', 'initial content\nnew content\n')),
162 ], revision_id='rev-2')162 ])
163 builder.build_snapshot(['rev-2'], [163 revid3 = builder.build_snapshot([revid2], [
164 ('modify', ('file-id', 'initial content\ncontent in 3\n')),164 ('modify', ('file-id', 'initial content\ncontent in 3\n')),
165 ], revision_id='rev-3')165 ])
166 builder.build_snapshot(['rev-3'], [166 revid4 = builder.build_snapshot([revid3], [
167 ('modify', ('file-id', 'initial content\nnew content\n')),167 ('modify', ('file-id', 'initial content\nnew content\n')),
168 ], revision_id='rev-4')168 ])
169 # In this case, the content locally is the same as content in basis169 # In this case, the content locally is the same as content in basis
170 # tree, but the merge revision states that *it* should win170 # tree, but the merge revision states that *it* should win
171 builder.finish_series()171 builder.finish_series()
172 b = builder.get_branch()172 b = builder.get_branch()
173 tree = b.create_checkout('tree', revision_id='rev-2', lightweight=True)173 tree = b.create_checkout('tree', revision_id=revid2, lightweight=True)
174 tree.lock_write()174 tree.lock_write()
175 self.addCleanup(tree.unlock)175 self.addCleanup(tree.unlock)
176 tree.set_parent_ids(['rev-2', 'rev-4'])176 tree.set_parent_ids([revid2, revid4])
177 annotations = tree.annotate_iter('file')177 annotations = tree.annotate_iter('file')
178 self.assertEqual([('rev-1', 'initial content\n'),178 self.assertEqual([(revid1, 'initial content\n'),
179 ('rev-4', 'new content\n'),179 (revid4, 'new content\n'),
180 ], annotations)180 ], annotations)
181181
182182
=== modified file 'breezy/tests/per_workingtree/test_get_file_mtime.py'
--- breezy/tests/per_workingtree/test_get_file_mtime.py 2017-11-12 20:44:54 +0000
+++ breezy/tests/per_workingtree/test_get_file_mtime.py 2017-11-29 12:08:31 +0000
@@ -57,7 +57,7 @@
57 one_id = tree.path2id('one')57 one_id = tree.path2id('one')
5858
59 st = os.lstat('tree/one')59 st = os.lstat('tree/one')
60 tree.commit('one', rev_id='rev-1')60 tree.commit('one')
6161
62 tree.lock_read()62 tree.lock_read()
63 try:63 try:
6464
=== modified file 'breezy/tests/per_workingtree/test_set_root_id.py'
--- breezy/tests/per_workingtree/test_set_root_id.py 2017-06-10 00:17:06 +0000
+++ breezy/tests/per_workingtree/test_set_root_id.py 2017-11-29 12:08:31 +0000
@@ -32,6 +32,8 @@
32 # deliberately tests concurrent access that isn't possible on windows.32 # deliberately tests concurrent access that isn't possible on windows.
33 self.thisFailsStrictLockCheck()33 self.thisFailsStrictLockCheck()
34 tree = self.make_branch_and_tree('a-tree')34 tree = self.make_branch_and_tree('a-tree')
35 if not tree.supports_setting_file_ids():
36 self.skipTest('format does not support setting file ids')
35 # setting the root id allows it to be read via get_root_id.37 # setting the root id allows it to be read via get_root_id.
36 root_id = u'\xe5n-id'.encode('utf8')38 root_id = u'\xe5n-id'.encode('utf8')
37 tree.lock_write()39 tree.lock_write()
@@ -57,6 +59,8 @@
5759
58 def test_set_root_id(self):60 def test_set_root_id(self):
59 tree = self.make_branch_and_tree('.')61 tree = self.make_branch_and_tree('.')
62 if not tree.supports_setting_file_ids():
63 self.skipTest('format does not support setting file ids')
60 tree.lock_write()64 tree.lock_write()
61 self.addCleanup(tree.unlock)65 self.addCleanup(tree.unlock)
62 orig_root_id = tree.get_root_id()66 orig_root_id = tree.get_root_id()
6367
=== modified file 'breezy/tests/per_workingtree/test_walkdirs.py'
--- breezy/tests/per_workingtree/test_walkdirs.py 2017-11-14 01:20:44 +0000
+++ breezy/tests/per_workingtree/test_walkdirs.py 2017-11-29 12:08:31 +0000
@@ -84,7 +84,6 @@
84 def add_dirblock(path, kind):84 def add_dirblock(path, kind):
85 dirblock = DirBlock(tree, path)85 dirblock = DirBlock(tree, path)
86 if file_status != self.unknown:86 if file_status != self.unknown:
87 dirblock.id = 'a ' + str(path).replace('/', '-') + '-id'
88 dirblock.inventory_kind = kind87 dirblock.inventory_kind = kind
89 if file_status != self.missing:88 if file_status != self.missing:
90 dirblock.disk_kind = kind89 dirblock.disk_kind = kind
@@ -97,7 +96,10 @@
97 add_dirblock(paths[3], 'directory')96 add_dirblock(paths[3], 'directory')
9897
99 if file_status != self.unknown:98 if file_status != self.unknown:
100 tree.add(paths, [db.id for db in dirblocks])99 tree.add(paths)
100 for dirblock in dirblocks:
101 if file_status != self.unknown:
102 dirblock.id = tree.path2id(dirblock.relpath)
101103
102 if file_status == self.missing:104 if file_status == self.missing:
103 # now make the files be missing105 # now make the files be missing
104106
=== modified file 'breezy/tests/per_workingtree/test_workingtree.py'
--- breezy/tests/per_workingtree/test_workingtree.py 2017-11-21 20:09:04 +0000
+++ breezy/tests/per_workingtree/test_workingtree.py 2017-11-29 12:08:31 +0000
@@ -409,17 +409,17 @@
409 wt = self.make_branch_and_tree('source')409 wt = self.make_branch_and_tree('source')
410 self.build_tree(['added', 'deleted', 'notadded'],410 self.build_tree(['added', 'deleted', 'notadded'],
411 transport=wt.controldir.transport.clone('..'))411 transport=wt.controldir.transport.clone('..'))
412 wt.add('deleted', 'deleted')412 wt.add('deleted')
413 wt.commit('add deleted')413 wt.commit('add deleted')
414 wt.remove('deleted')414 wt.remove('deleted')
415 wt.add('added', 'added')415 wt.add('added')
416 cloned_dir = wt.controldir.clone('target')416 cloned_dir = wt.controldir.clone('target')
417 cloned = cloned_dir.open_workingtree()417 cloned = cloned_dir.open_workingtree()
418 cloned_transport = cloned.controldir.transport.clone('..')418 cloned_transport = cloned.controldir.transport.clone('..')
419 self.assertFalse(cloned_transport.has('deleted'))419 self.assertFalse(cloned_transport.has('deleted'))
420 self.assertTrue(cloned_transport.has('added'))420 self.assertTrue(cloned_transport.has('added'))
421 self.assertFalse(cloned_transport.has('notadded'))421 self.assertFalse(cloned_transport.has('notadded'))
422 self.assertEqual('added', cloned.path2id('added'))422 self.assertIsNot(None, cloned.path2id('added'))
423 self.assertEqual(None, cloned.path2id('deleted'))423 self.assertEqual(None, cloned.path2id('deleted'))
424 self.assertEqual(None, cloned.path2id('notadded'))424 self.assertEqual(None, cloned.path2id('notadded'))
425425
@@ -799,12 +799,13 @@
799 self.build_tree(['foo.pyc'])799 self.build_tree(['foo.pyc'])
800 # ensure that foo.pyc is ignored800 # ensure that foo.pyc is ignored
801 self.build_tree_contents([('.bzrignore', 'foo.pyc')])801 self.build_tree_contents([('.bzrignore', 'foo.pyc')])
802 tree.add('foo.pyc', 'anid')802 tree.add('foo.pyc')
803 anid = tree.path2id('foo.pyc')
803 tree.lock_read()804 tree.lock_read()
804 files = sorted(list(tree.list_files()))805 files = sorted(list(tree.list_files()))
805 tree.unlock()806 tree.unlock()
806 self.assertEqual((u'.bzrignore', '?', 'file', None), files[0][:-1])807 self.assertEqual((u'.bzrignore', '?', 'file', None), files[0][:-1])
807 self.assertEqual((u'foo.pyc', 'V', 'file', 'anid'), files[1][:-1])808 self.assertEqual((u'foo.pyc', 'V', 'file', anid), files[1][:-1])
808 self.assertEqual(2, len(files))809 self.assertEqual(2, len(files))
809810
810 def test_non_normalized_add_accessible(self):811 def test_non_normalized_add_accessible(self):
811812
=== modified file 'breezy/workingtree.py'
--- breezy/workingtree.py 2017-11-17 03:06:50 +0000
+++ breezy/workingtree.py 2017-11-29 12:08:31 +0000
@@ -38,9 +38,6 @@
3838
39from .lazy_import import lazy_import39from .lazy_import import lazy_import
40lazy_import(globals(), """40lazy_import(globals(), """
41from bisect import bisect_left
42import itertools
43import operator
44import stat41import stat
4542
46from breezy import (43from breezy import (
@@ -393,7 +390,7 @@
393 except errors.NoSuchRevision:390 except errors.NoSuchRevision:
394 pass391 pass
395 # No cached copy available, retrieve from the repository.392 # No cached copy available, retrieve from the repository.
396 # FIXME? RBC 20060403 should we cache the inventory locally393 # FIXME? RBC 20060403 should we cache the tree locally
397 # at this point ?394 # at this point ?
398 try:395 try:
399 return self.branch.repository.revision_tree(revision_id)396 return self.branch.repository.revision_tree(revision_id)
@@ -762,7 +759,7 @@
762 because of a merge.759 because of a merge.
763760
764 This returns a map of file_id->sha1, containing only files which are761 This returns a map of file_id->sha1, containing only files which are
765 still in the working inventory and have that text hash.762 still in the working tree and have that text hash.
766 """763 """
767 raise NotImplementedError(self.merge_modified)764 raise NotImplementedError(self.merge_modified)
768765
@@ -1155,8 +1152,8 @@
1155 def revision_tree(self, revision_id):1152 def revision_tree(self, revision_id):
1156 """See Tree.revision_tree.1153 """See Tree.revision_tree.
11571154
1158 WorkingTree can supply revision_trees for the basis revision only1155 For trees that can be obtained from the working tree, this
1159 because there is only one cached inventory in the bzr directory.1156 will do so. For other trees, it will fall back to the repository.
1160 """1157 """
1161 raise NotImplementedError(self.revision_tree)1158 raise NotImplementedError(self.revision_tree)
11621159
@@ -1350,124 +1347,7 @@
1350 If the tree is not locked, it may cause an error to be raised,1347 If the tree is not locked, it may cause an error to be raised,
1351 depending on the tree implementation.1348 depending on the tree implementation.
1352 """1349 """
1353 disk_top = self.abspath(prefix)1350 raise NotImplementedError(self.walkdirs)
1354 if disk_top.endswith('/'):
1355 disk_top = disk_top[:-1]
1356 top_strip_len = len(disk_top) + 1
1357 inventory_iterator = self._walkdirs(prefix)
1358 disk_iterator = osutils.walkdirs(disk_top, prefix)
1359 try:
1360 current_disk = next(disk_iterator)
1361 disk_finished = False
1362 except OSError as e:
1363 if not (e.errno == errno.ENOENT or
1364 (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
1365 raise
1366 current_disk = None
1367 disk_finished = True
1368 try:
1369 current_inv = next(inventory_iterator)
1370 inv_finished = False
1371 except StopIteration:
1372 current_inv = None
1373 inv_finished = True
1374 while not inv_finished or not disk_finished:
1375 if current_disk:
1376 ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
1377 cur_disk_dir_content) = current_disk
1378 else:
1379 ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
1380 cur_disk_dir_content) = ((None, None), None)
1381 if not disk_finished:
1382 # strip out .bzr dirs
1383 if (cur_disk_dir_path_from_top[top_strip_len:] == '' and
1384 len(cur_disk_dir_content) > 0):
1385 # osutils.walkdirs can be made nicer -
1386 # yield the path-from-prefix rather than the pathjoined
1387 # value.
1388 bzrdir_loc = bisect_left(cur_disk_dir_content,
1389 ('.bzr', '.bzr'))
1390 if (bzrdir_loc < len(cur_disk_dir_content)
1391 and self.controldir.is_control_filename(
1392 cur_disk_dir_content[bzrdir_loc][0])):
1393 # we dont yield the contents of, or, .bzr itself.
1394 del cur_disk_dir_content[bzrdir_loc]
1395 if inv_finished:
1396 # everything is unknown
1397 direction = 1
1398 elif disk_finished:
1399 # everything is missing
1400 direction = -1
1401 else:
1402 direction = cmp(current_inv[0][0], cur_disk_dir_relpath)
1403 if direction > 0:
1404 # disk is before inventory - unknown
1405 dirblock = [(relpath, basename, kind, stat, None, None) for
1406 relpath, basename, kind, stat, top_path in
1407 cur_disk_dir_content]
1408 yield (cur_disk_dir_relpath, None), dirblock
1409 try:
1410 current_disk = next(disk_iterator)
1411 except StopIteration:
1412 disk_finished = True
1413 elif direction < 0:
1414 # inventory is before disk - missing.
1415 dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
1416 for relpath, basename, dkind, stat, fileid, kind in
1417 current_inv[1]]
1418 yield (current_inv[0][0], current_inv[0][1]), dirblock
1419 try:
1420 current_inv = next(inventory_iterator)
1421 except StopIteration:
1422 inv_finished = True
1423 else:
1424 # versioned present directory
1425 # merge the inventory and disk data together
1426 dirblock = []
1427 for relpath, subiterator in itertools.groupby(sorted(
1428 current_inv[1] + cur_disk_dir_content,
1429 key=operator.itemgetter(0)), operator.itemgetter(1)):
1430 path_elements = list(subiterator)
1431 if len(path_elements) == 2:
1432 inv_row, disk_row = path_elements
1433 # versioned, present file
1434 dirblock.append((inv_row[0],
1435 inv_row[1], disk_row[2],
1436 disk_row[3], inv_row[4],
1437 inv_row[5]))
1438 elif len(path_elements[0]) == 5:
1439 # unknown disk file
1440 dirblock.append((path_elements[0][0],
1441 path_elements[0][1], path_elements[0][2],
1442 path_elements[0][3], None, None))
1443 elif len(path_elements[0]) == 6:
1444 # versioned, absent file.
1445 dirblock.append((path_elements[0][0],
1446 path_elements[0][1], 'unknown', None,
1447 path_elements[0][4], path_elements[0][5]))
1448 else:
1449 raise NotImplementedError('unreachable code')
1450 yield current_inv[0], dirblock
1451 try:
1452 current_inv = next(inventory_iterator)
1453 except StopIteration:
1454 inv_finished = True
1455 try:
1456 current_disk = next(disk_iterator)
1457 except StopIteration:
1458 disk_finished = True
1459
1460 def _walkdirs(self, prefix=""):
1461 """Walk the directories of this tree.
1462
1463 :param prefix: is used as the directrory to start with.
1464 :returns: a generator which yields items in the form::
1465
1466 ((curren_directory_path, fileid),
1467 [(file1_path, file1_name, file1_kind, None, file1_id,
1468 file1_kind), ... ])
1469 """
1470 raise NotImplementedError(self._walkdirs)
14711351
1472 def auto_resolve(self):1352 def auto_resolve(self):
1473 """Automatically resolve text conflicts according to contents.1353 """Automatically resolve text conflicts according to contents.

Subscribers

People subscribed via source and target branches

to all changes: