Merge lp:~jelmer/brz/inv-not-dict into lp:brz
- inv-not-dict
- Merge into trunk
Proposed by
Jelmer Vernooij
Status: | Merged |
---|---|
Approved by: | Jelmer Vernooij |
Approved revision: | no longer in the source branch. |
Merge reported by: | The Breezy Bot |
Merged at revision: | not available |
Proposed branch: | lp:~jelmer/brz/inv-not-dict |
Merge into: | lp:brz |
Diff against target: |
1417 lines (+210/-210) 26 files modified
breezy/bundle/serializer/v08.py (+1/-1) breezy/bzr/inventory.py (+38/-38) breezy/bzr/inventorytree.py (+11/-11) breezy/bzr/vf_repository.py (+6/-6) breezy/bzr/workingtree.py (+13/-13) breezy/bzr/workingtree_4.py (+8/-8) breezy/merge.py (+4/-4) breezy/plugins/fastimport/bzr_commit_handler.py (+8/-8) breezy/plugins/fastimport/revision_store.py (+6/-6) breezy/plugins/repodebug/file_refs.py (+1/-1) breezy/tests/per_interrepository/test_fetch.py (+1/-1) breezy/tests/per_intertree/test_compare.py (+1/-1) breezy/tests/per_inventory/basics.py (+2/-2) breezy/tests/per_repository_reference/test_add_inventory.py (+2/-2) breezy/tests/per_repository_vf/test_add_inventory_by_delta.py (+2/-2) breezy/tests/per_repository_vf/test_fileid_involved.py (+1/-1) breezy/tests/per_tree/test_tree.py (+1/-1) breezy/tests/per_workingtree/test_nested_specifics.py (+1/-1) breezy/tests/per_workingtree/test_parents.py (+4/-4) breezy/tests/test_commit.py (+6/-6) breezy/tests/test_dirstate.py (+1/-1) breezy/tests/test_inv.py (+72/-72) breezy/tests/test_revisiontree.py (+1/-1) breezy/tests/test_transform.py (+1/-1) breezy/tests/test_xml.py (+17/-17) breezy/tree.py (+1/-1) |
To merge this branch: | bzr merge lp:~jelmer/brz/inv-not-dict |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Martin Packman | Approve | ||
Review via email: mp+342053@code.launchpad.net |
Commit message
Description of the change
Remove the __getitem__, __iter__ and __delitem__ implementations from Inventory.
Instead, add .get_entry(), .iter_all_ids() and .delete().
To post a comment you must log in.
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote : | # |
Running landing tests failed
https:/
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'breezy/bundle/serializer/v08.py' | |||
2 | --- breezy/bundle/serializer/v08.py 2018-02-24 15:50:23 +0000 | |||
3 | +++ breezy/bundle/serializer/v08.py 2018-03-25 02:37:06 +0000 | |||
4 | @@ -281,7 +281,7 @@ | |||
5 | 281 | 281 | ||
6 | 282 | def finish_action(action, file_id, kind, meta_modified, text_modified, | 282 | def finish_action(action, file_id, kind, meta_modified, text_modified, |
7 | 283 | old_path, new_path): | 283 | old_path, new_path): |
9 | 284 | entry = new_tree.root_inventory[file_id] | 284 | entry = new_tree.root_inventory.get_entry(file_id) |
10 | 285 | if entry.revision != default_revision_id: | 285 | if entry.revision != default_revision_id: |
11 | 286 | action.add_utf8_property('last-changed', entry.revision) | 286 | action.add_utf8_property('last-changed', entry.revision) |
12 | 287 | if meta_modified: | 287 | if meta_modified: |
13 | 288 | 288 | ||
14 | === modified file 'breezy/bzr/inventory.py' | |||
15 | --- breezy/bzr/inventory.py 2018-03-24 17:48:04 +0000 | |||
16 | +++ breezy/bzr/inventory.py 2018-03-25 02:37:06 +0000 | |||
17 | @@ -112,7 +112,7 @@ | |||
18 | 112 | '2325' | 112 | '2325' |
19 | 113 | >>> i.add(InventoryFile('2326', 'wibble.c', '2325')) | 113 | >>> i.add(InventoryFile('2326', 'wibble.c', '2325')) |
20 | 114 | InventoryFile('2326', 'wibble.c', parent_id='2325', sha1=None, len=None, revision=None) | 114 | InventoryFile('2326', 'wibble.c', parent_id='2325', sha1=None, len=None, revision=None) |
22 | 115 | >>> i['2326'] | 115 | >>> i.get_entry('2326') |
23 | 116 | InventoryFile('2326', 'wibble.c', parent_id='2325', sha1=None, len=None, revision=None) | 116 | InventoryFile('2326', 'wibble.c', parent_id='2325', sha1=None, len=None, revision=None) |
24 | 117 | >>> for path, entry in i.iter_entries(): | 117 | >>> for path, entry in i.iter_entries(): |
25 | 118 | ... print path | 118 | ... print path |
26 | @@ -179,7 +179,7 @@ | |||
27 | 179 | # identify candidate head revision ids. | 179 | # identify candidate head revision ids. |
28 | 180 | for inv in previous_inventories: | 180 | for inv in previous_inventories: |
29 | 181 | try: | 181 | try: |
31 | 182 | ie = inv[self.file_id] | 182 | ie = inv.get_entry(self.file_id) |
32 | 183 | except errors.NoSuchId: | 183 | except errors.NoSuchId: |
33 | 184 | pass | 184 | pass |
34 | 185 | else: | 185 | else: |
35 | @@ -680,7 +680,7 @@ | |||
36 | 680 | from_dir = self.root | 680 | from_dir = self.root |
37 | 681 | yield '', self.root | 681 | yield '', self.root |
38 | 682 | elif isinstance(from_dir, (str, text_type)): | 682 | elif isinstance(from_dir, (str, text_type)): |
40 | 683 | from_dir = self[from_dir] | 683 | from_dir = self.get_entry(from_dir) |
41 | 684 | 684 | ||
42 | 685 | # unrolling the recursive called changed the time from | 685 | # unrolling the recursive called changed the time from |
43 | 686 | # 440ms/663ms (inline/total) to 116ms/116ms | 686 | # 440ms/663ms (inline/total) to 116ms/116ms |
44 | @@ -762,14 +762,14 @@ | |||
45 | 762 | except errors.NoSuchId: | 762 | except errors.NoSuchId: |
46 | 763 | pass | 763 | pass |
47 | 764 | else: | 764 | else: |
49 | 765 | yield path, self[file_id] | 765 | yield path, self.get_entry(file_id) |
50 | 766 | return | 766 | return |
51 | 767 | from_dir = self.root | 767 | from_dir = self.root |
52 | 768 | if (specific_file_ids is None or yield_parents or | 768 | if (specific_file_ids is None or yield_parents or |
53 | 769 | self.root.file_id in specific_file_ids): | 769 | self.root.file_id in specific_file_ids): |
54 | 770 | yield u'', self.root | 770 | yield u'', self.root |
55 | 771 | elif isinstance(from_dir, (str, text_type)): | 771 | elif isinstance(from_dir, (str, text_type)): |
57 | 772 | from_dir = self[from_dir] | 772 | from_dir = self.get_entry(from_dir) |
58 | 773 | 773 | ||
59 | 774 | if specific_file_ids is not None: | 774 | if specific_file_ids is not None: |
60 | 775 | # TODO: jam 20070302 This could really be done as a loop rather | 775 | # TODO: jam 20070302 This could really be done as a loop rather |
61 | @@ -779,7 +779,7 @@ | |||
62 | 779 | def add_ancestors(file_id): | 779 | def add_ancestors(file_id): |
63 | 780 | if not byid.has_id(file_id): | 780 | if not byid.has_id(file_id): |
64 | 781 | return | 781 | return |
66 | 782 | parent_id = byid[file_id].parent_id | 782 | parent_id = byid.get_entry(file_id).parent_id |
67 | 783 | if parent_id is None: | 783 | if parent_id is None: |
68 | 784 | return | 784 | return |
69 | 785 | if parent_id not in parents: | 785 | if parent_id not in parents: |
70 | @@ -811,8 +811,8 @@ | |||
71 | 811 | 811 | ||
72 | 812 | def _make_delta(self, old): | 812 | def _make_delta(self, old): |
73 | 813 | """Make an inventory delta from two inventories.""" | 813 | """Make an inventory delta from two inventories.""" |
76 | 814 | old_ids = set(old) | 814 | old_ids = set(old.iter_all_ids()) |
77 | 815 | new_ids = set(self) | 815 | new_ids = set(self.iter_all_ids()) |
78 | 816 | adds = new_ids - old_ids | 816 | adds = new_ids - old_ids |
79 | 817 | deletes = old_ids - new_ids | 817 | deletes = old_ids - new_ids |
80 | 818 | common = old_ids.intersection(new_ids) | 818 | common = old_ids.intersection(new_ids) |
81 | @@ -820,11 +820,11 @@ | |||
82 | 820 | for file_id in deletes: | 820 | for file_id in deletes: |
83 | 821 | delta.append((old.id2path(file_id), None, file_id, None)) | 821 | delta.append((old.id2path(file_id), None, file_id, None)) |
84 | 822 | for file_id in adds: | 822 | for file_id in adds: |
86 | 823 | delta.append((None, self.id2path(file_id), file_id, self[file_id])) | 823 | delta.append((None, self.id2path(file_id), file_id, self.get_entry(file_id))) |
87 | 824 | for file_id in common: | 824 | for file_id in common: |
89 | 825 | if old[file_id] != self[file_id]: | 825 | if old.get_entry(file_id) != self.get_entry(file_id): |
90 | 826 | delta.append((old.id2path(file_id), self.id2path(file_id), | 826 | delta.append((old.id2path(file_id), self.id2path(file_id), |
92 | 827 | file_id, self[file_id])) | 827 | file_id, self.get_entry(file_id))) |
93 | 828 | return delta | 828 | return delta |
94 | 829 | 829 | ||
95 | 830 | def make_entry(self, kind, name, parent_id, file_id=None): | 830 | def make_entry(self, kind, name, parent_id, file_id=None): |
96 | @@ -943,7 +943,7 @@ | |||
97 | 943 | >>> inv = Inventory() | 943 | >>> inv = Inventory() |
98 | 944 | >>> inv.add(InventoryFile('123-123', 'hello.c', ROOT_ID)) | 944 | >>> inv.add(InventoryFile('123-123', 'hello.c', ROOT_ID)) |
99 | 945 | InventoryFile('123-123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None) | 945 | InventoryFile('123-123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None) |
101 | 946 | >>> inv['123-123'].name | 946 | >>> inv.get_entry('123-123').name |
102 | 947 | 'hello.c' | 947 | 'hello.c' |
103 | 948 | 948 | ||
104 | 949 | Id's may be looked up from paths: | 949 | Id's may be looked up from paths: |
105 | @@ -1046,7 +1046,7 @@ | |||
106 | 1046 | for old_path, file_id in sorted(((op, f) for op, np, f, e in delta | 1046 | for old_path, file_id in sorted(((op, f) for op, np, f, e in delta |
107 | 1047 | if op is not None), reverse=True): | 1047 | if op is not None), reverse=True): |
108 | 1048 | # Preserve unaltered children of file_id for later reinsertion. | 1048 | # Preserve unaltered children of file_id for later reinsertion. |
110 | 1049 | file_id_children = getattr(self[file_id], 'children', {}) | 1049 | file_id_children = getattr(self.get_entry(file_id), 'children', {}) |
111 | 1050 | if len(file_id_children): | 1050 | if len(file_id_children): |
112 | 1051 | children[file_id] = file_id_children | 1051 | children[file_id] = file_id_children |
113 | 1052 | if self.id2path(file_id) != old_path: | 1052 | if self.id2path(file_id) != old_path: |
114 | @@ -1113,7 +1113,7 @@ | |||
115 | 1113 | other.add(entry.copy()) | 1113 | other.add(entry.copy()) |
116 | 1114 | return other | 1114 | return other |
117 | 1115 | 1115 | ||
119 | 1116 | def __iter__(self): | 1116 | def iter_all_ids(self): |
120 | 1117 | """Iterate over all file-ids.""" | 1117 | """Iterate over all file-ids.""" |
121 | 1118 | return iter(self._byid) | 1118 | return iter(self._byid) |
122 | 1119 | 1119 | ||
123 | @@ -1133,13 +1133,13 @@ | |||
124 | 1133 | """Returns number of entries.""" | 1133 | """Returns number of entries.""" |
125 | 1134 | return len(self._byid) | 1134 | return len(self._byid) |
126 | 1135 | 1135 | ||
128 | 1136 | def __getitem__(self, file_id): | 1136 | def get_entry(self, file_id): |
129 | 1137 | """Return the entry for given file_id. | 1137 | """Return the entry for given file_id. |
130 | 1138 | 1138 | ||
131 | 1139 | >>> inv = Inventory() | 1139 | >>> inv = Inventory() |
132 | 1140 | >>> inv.add(InventoryFile('123123', 'hello.c', ROOT_ID)) | 1140 | >>> inv.add(InventoryFile('123123', 'hello.c', ROOT_ID)) |
133 | 1141 | InventoryFile('123123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None) | 1141 | InventoryFile('123123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None) |
135 | 1142 | >>> inv['123123'].name | 1142 | >>> inv.get_entry('123123').name |
136 | 1143 | 'hello.c' | 1143 | 'hello.c' |
137 | 1144 | """ | 1144 | """ |
138 | 1145 | try: | 1145 | try: |
139 | @@ -1152,7 +1152,7 @@ | |||
140 | 1152 | return self._byid[file_id].kind | 1152 | return self._byid[file_id].kind |
141 | 1153 | 1153 | ||
142 | 1154 | def get_child(self, parent_id, filename): | 1154 | def get_child(self, parent_id, filename): |
144 | 1155 | return self[parent_id].children.get(filename) | 1155 | return self.get_entry(parent_id).children.get(filename) |
145 | 1156 | 1156 | ||
146 | 1157 | def _add_child(self, entry): | 1157 | def _add_child(self, entry): |
147 | 1158 | """Add an entry to the inventory, without adding it to its parent""" | 1158 | """Add an entry to the inventory, without adding it to its parent""" |
148 | @@ -1214,7 +1214,7 @@ | |||
149 | 1214 | ie = make_entry(kind, parts[-1], parent_id, file_id) | 1214 | ie = make_entry(kind, parts[-1], parent_id, file_id) |
150 | 1215 | return self.add(ie) | 1215 | return self.add(ie) |
151 | 1216 | 1216 | ||
153 | 1217 | def __delitem__(self, file_id): | 1217 | def delete(self, file_id): |
154 | 1218 | """Remove entry by id. | 1218 | """Remove entry by id. |
155 | 1219 | 1219 | ||
156 | 1220 | >>> inv = Inventory() | 1220 | >>> inv = Inventory() |
157 | @@ -1222,14 +1222,14 @@ | |||
158 | 1222 | InventoryFile('123', 'foo.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None) | 1222 | InventoryFile('123', 'foo.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None) |
159 | 1223 | >>> inv.has_id('123') | 1223 | >>> inv.has_id('123') |
160 | 1224 | True | 1224 | True |
162 | 1225 | >>> del inv['123'] | 1225 | >>> inv.delete('123') |
163 | 1226 | >>> inv.has_id('123') | 1226 | >>> inv.has_id('123') |
164 | 1227 | False | 1227 | False |
165 | 1228 | """ | 1228 | """ |
167 | 1229 | ie = self[file_id] | 1229 | ie = self.get_entry(file_id) |
168 | 1230 | del self._byid[file_id] | 1230 | del self._byid[file_id] |
169 | 1231 | if ie.parent_id is not None: | 1231 | if ie.parent_id is not None: |
171 | 1232 | del self[ie.parent_id].children[ie.name] | 1232 | del self.get_entry(ie.parent_id).children[ie.name] |
172 | 1233 | 1233 | ||
173 | 1234 | def __eq__(self, other): | 1234 | def __eq__(self, other): |
174 | 1235 | """Compare two sets by comparing their contents. | 1235 | """Compare two sets by comparing their contents. |
175 | @@ -1273,10 +1273,10 @@ | |||
176 | 1273 | 1273 | ||
177 | 1274 | def _make_delta(self, old): | 1274 | def _make_delta(self, old): |
178 | 1275 | """Make an inventory delta from two inventories.""" | 1275 | """Make an inventory delta from two inventories.""" |
183 | 1276 | old_getter = getattr(old, '_byid', old) | 1276 | old_getter = old.get_entry |
184 | 1277 | new_getter = self._byid | 1277 | new_getter = self.get_entry |
185 | 1278 | old_ids = set(old_getter) | 1278 | old_ids = set(old.iter_all_ids()) |
186 | 1279 | new_ids = set(new_getter) | 1279 | new_ids = set(self.iter_all_ids()) |
187 | 1280 | adds = new_ids - old_ids | 1280 | adds = new_ids - old_ids |
188 | 1281 | deletes = old_ids - new_ids | 1281 | deletes = old_ids - new_ids |
189 | 1282 | if not adds and not deletes: | 1282 | if not adds and not deletes: |
190 | @@ -1287,10 +1287,10 @@ | |||
191 | 1287 | for file_id in deletes: | 1287 | for file_id in deletes: |
192 | 1288 | delta.append((old.id2path(file_id), None, file_id, None)) | 1288 | delta.append((old.id2path(file_id), None, file_id, None)) |
193 | 1289 | for file_id in adds: | 1289 | for file_id in adds: |
195 | 1290 | delta.append((None, self.id2path(file_id), file_id, self[file_id])) | 1290 | delta.append((None, self.id2path(file_id), file_id, self.get_entry(file_id))) |
196 | 1291 | for file_id in common: | 1291 | for file_id in common: |
199 | 1292 | new_ie = new_getter[file_id] | 1292 | new_ie = new_getter(file_id) |
200 | 1293 | old_ie = old_getter[file_id] | 1293 | old_ie = old_getter(file_id) |
201 | 1294 | # If xml_serializer returns the cached InventoryEntries (rather | 1294 | # If xml_serializer returns the cached InventoryEntries (rather |
202 | 1295 | # than always doing .copy()), inlining the 'is' check saves 2.7M | 1295 | # than always doing .copy()), inlining the 'is' check saves 2.7M |
203 | 1296 | # calls to __eq__. Under lsprof this saves 20s => 6s. | 1296 | # calls to __eq__. Under lsprof this saves 20s => 6s. |
204 | @@ -1315,10 +1315,10 @@ | |||
205 | 1315 | if ie.kind == 'directory': | 1315 | if ie.kind == 'directory': |
206 | 1316 | to_find_delete.extend(viewvalues(ie.children)) | 1316 | to_find_delete.extend(viewvalues(ie.children)) |
207 | 1317 | for file_id in reversed(to_delete): | 1317 | for file_id in reversed(to_delete): |
209 | 1318 | ie = self[file_id] | 1318 | ie = self.get_entry(file_id) |
210 | 1319 | del self._byid[file_id] | 1319 | del self._byid[file_id] |
211 | 1320 | if ie.parent_id is not None: | 1320 | if ie.parent_id is not None: |
213 | 1321 | del self[ie.parent_id].children[ie.name] | 1321 | del self.get_entry(ie.parent_id).children[ie.name] |
214 | 1322 | else: | 1322 | else: |
215 | 1323 | self.root = None | 1323 | self.root = None |
216 | 1324 | 1324 | ||
217 | @@ -1707,7 +1707,7 @@ | |||
218 | 1707 | if old_path is None: | 1707 | if old_path is None: |
219 | 1708 | old_key = None | 1708 | old_key = None |
220 | 1709 | else: | 1709 | else: |
222 | 1710 | old_entry = self[file_id] | 1710 | old_entry = self.get_entry(file_id) |
223 | 1711 | old_key = self._parent_id_basename_key(old_entry) | 1711 | old_key = self._parent_id_basename_key(old_entry) |
224 | 1712 | if new_path is None: | 1712 | if new_path is None: |
225 | 1713 | new_key = None | 1713 | new_key = None |
226 | @@ -1728,7 +1728,7 @@ | |||
227 | 1728 | new_key, [None, None])[1] = new_value | 1728 | new_key, [None, None])[1] = new_value |
228 | 1729 | # validate that deletes are complete. | 1729 | # validate that deletes are complete. |
229 | 1730 | for file_id in deletes: | 1730 | for file_id in deletes: |
231 | 1731 | entry = self[file_id] | 1731 | entry = self.get_entry(file_id) |
232 | 1732 | if entry.kind != 'directory': | 1732 | if entry.kind != 'directory': |
233 | 1733 | continue | 1733 | continue |
234 | 1734 | # This loop could potentially be better by using the id_basename | 1734 | # This loop could potentially be better by using the id_basename |
235 | @@ -1754,7 +1754,7 @@ | |||
236 | 1754 | parents.discard(('', None)) | 1754 | parents.discard(('', None)) |
237 | 1755 | for parent_path, parent in parents: | 1755 | for parent_path, parent in parents: |
238 | 1756 | try: | 1756 | try: |
240 | 1757 | if result[parent].kind != 'directory': | 1757 | if result.get_entry(parent).kind != 'directory': |
241 | 1758 | raise errors.InconsistentDelta(result.id2path(parent), parent, | 1758 | raise errors.InconsistentDelta(result.id2path(parent), parent, |
242 | 1759 | 'Not a directory, but given children') | 1759 | 'Not a directory, but given children') |
243 | 1760 | except errors.NoSuchId: | 1760 | except errors.NoSuchId: |
244 | @@ -1880,7 +1880,7 @@ | |||
245 | 1880 | parent_id = b'' | 1880 | parent_id = b'' |
246 | 1881 | return StaticTuple(parent_id, entry.name.encode('utf8')).intern() | 1881 | return StaticTuple(parent_id, entry.name.encode('utf8')).intern() |
247 | 1882 | 1882 | ||
249 | 1883 | def __getitem__(self, file_id): | 1883 | def get_entry(self, file_id): |
250 | 1884 | """map a single file_id -> InventoryEntry.""" | 1884 | """map a single file_id -> InventoryEntry.""" |
251 | 1885 | if file_id is None: | 1885 | if file_id is None: |
252 | 1886 | raise errors.NoSuchId(self, file_id) | 1886 | raise errors.NoSuchId(self, file_id) |
253 | @@ -1895,7 +1895,7 @@ | |||
254 | 1895 | raise errors.NoSuchId(self, file_id) | 1895 | raise errors.NoSuchId(self, file_id) |
255 | 1896 | 1896 | ||
256 | 1897 | def _getitems(self, file_ids): | 1897 | def _getitems(self, file_ids): |
258 | 1898 | """Similar to __getitem__, but lets you query for multiple. | 1898 | """Similar to get_entry, but lets you query for multiple. |
259 | 1899 | 1899 | ||
260 | 1900 | The returned order is undefined. And currently if an item doesn't | 1900 | The returned order is undefined. And currently if an item doesn't |
261 | 1901 | exist, it isn't included in the output. | 1901 | exist, it isn't included in the output. |
262 | @@ -1929,13 +1929,13 @@ | |||
263 | 1929 | """Yield the parents of file_id up to the root.""" | 1929 | """Yield the parents of file_id up to the root.""" |
264 | 1930 | while file_id is not None: | 1930 | while file_id is not None: |
265 | 1931 | try: | 1931 | try: |
267 | 1932 | ie = self[file_id] | 1932 | ie = self.get_entry(file_id) |
268 | 1933 | except KeyError: | 1933 | except KeyError: |
269 | 1934 | raise errors.NoSuchId(tree=self, file_id=file_id) | 1934 | raise errors.NoSuchId(tree=self, file_id=file_id) |
270 | 1935 | yield ie | 1935 | yield ie |
271 | 1936 | file_id = ie.parent_id | 1936 | file_id = ie.parent_id |
272 | 1937 | 1937 | ||
274 | 1938 | def __iter__(self): | 1938 | def iter_all_ids(self): |
275 | 1939 | """Iterate over all file-ids.""" | 1939 | """Iterate over all file-ids.""" |
276 | 1940 | for key, _ in self.id_to_entry.iteritems(): | 1940 | for key, _ in self.id_to_entry.iteritems(): |
277 | 1941 | yield key[-1] | 1941 | yield key[-1] |
278 | @@ -2172,7 +2172,7 @@ | |||
279 | 2172 | @property | 2172 | @property |
280 | 2173 | def root(self): | 2173 | def root(self): |
281 | 2174 | """Get the root entry.""" | 2174 | """Get the root entry.""" |
283 | 2175 | return self[self.root_id] | 2175 | return self.get_entry(self.root_id) |
284 | 2176 | 2176 | ||
285 | 2177 | 2177 | ||
286 | 2178 | class CHKInventoryDirectory(InventoryDirectory): | 2178 | class CHKInventoryDirectory(InventoryDirectory): |
287 | 2179 | 2179 | ||
288 | === modified file 'breezy/bzr/inventorytree.py' | |||
289 | --- breezy/bzr/inventorytree.py 2018-03-24 23:49:53 +0000 | |||
290 | +++ breezy/bzr/inventorytree.py 2018-03-25 02:37:06 +0000 | |||
291 | @@ -300,7 +300,7 @@ | |||
292 | 300 | with self.lock_read(): | 300 | with self.lock_read(): |
293 | 301 | inv, inv_file_id = self._path2inv_file_id(path, file_id) | 301 | inv, inv_file_id = self._path2inv_file_id(path, file_id) |
294 | 302 | try: | 302 | try: |
296 | 303 | ie = inv[inv_file_id] | 303 | ie = inv.get_entry(inv_file_id) |
297 | 304 | except errors.NoSuchId: | 304 | except errors.NoSuchId: |
298 | 305 | raise errors.NoSuchFile(path) | 305 | raise errors.NoSuchFile(path) |
299 | 306 | else: | 306 | else: |
300 | @@ -807,7 +807,7 @@ | |||
301 | 807 | def get_file_mtime(self, path, file_id=None): | 807 | def get_file_mtime(self, path, file_id=None): |
302 | 808 | inv, inv_file_id = self._path2inv_file_id(path, file_id) | 808 | inv, inv_file_id = self._path2inv_file_id(path, file_id) |
303 | 809 | try: | 809 | try: |
305 | 810 | ie = inv[inv_file_id] | 810 | ie = inv.get_entry(inv_file_id) |
306 | 811 | except errors.NoSuchId: | 811 | except errors.NoSuchId: |
307 | 812 | raise errors.NoSuchFile(path) | 812 | raise errors.NoSuchFile(path) |
308 | 813 | try: | 813 | try: |
309 | @@ -818,11 +818,11 @@ | |||
310 | 818 | 818 | ||
311 | 819 | def get_file_size(self, path, file_id=None): | 819 | def get_file_size(self, path, file_id=None): |
312 | 820 | inv, inv_file_id = self._path2inv_file_id(path, file_id) | 820 | inv, inv_file_id = self._path2inv_file_id(path, file_id) |
314 | 821 | return inv[inv_file_id].text_size | 821 | return inv.get_entry(inv_file_id).text_size |
315 | 822 | 822 | ||
316 | 823 | def get_file_sha1(self, path, file_id=None, stat_value=None): | 823 | def get_file_sha1(self, path, file_id=None, stat_value=None): |
317 | 824 | inv, inv_file_id = self._path2inv_file_id(path, file_id) | 824 | inv, inv_file_id = self._path2inv_file_id(path, file_id) |
319 | 825 | ie = inv[inv_file_id] | 825 | ie = inv.get_entry(inv_file_id) |
320 | 826 | if ie.kind == "file": | 826 | if ie.kind == "file": |
321 | 827 | return ie.text_sha1 | 827 | return ie.text_sha1 |
322 | 828 | return None | 828 | return None |
323 | @@ -830,14 +830,14 @@ | |||
324 | 830 | def get_file_revision(self, path, file_id=None): | 830 | def get_file_revision(self, path, file_id=None): |
325 | 831 | inv, inv_file_id = self._path2inv_file_id(path, file_id) | 831 | inv, inv_file_id = self._path2inv_file_id(path, file_id) |
326 | 832 | try: | 832 | try: |
328 | 833 | ie = inv[inv_file_id] | 833 | ie = inv.get_entry(inv_file_id) |
329 | 834 | except errors.NoSuchId: | 834 | except errors.NoSuchId: |
330 | 835 | raise errors.NoSuchFile(path) | 835 | raise errors.NoSuchFile(path) |
331 | 836 | return ie.revision | 836 | return ie.revision |
332 | 837 | 837 | ||
333 | 838 | def is_executable(self, path, file_id=None): | 838 | def is_executable(self, path, file_id=None): |
334 | 839 | inv, inv_file_id = self._path2inv_file_id(path, file_id) | 839 | inv, inv_file_id = self._path2inv_file_id(path, file_id) |
336 | 840 | ie = inv[inv_file_id] | 840 | ie = inv.get_entry(inv_file_id) |
337 | 841 | if ie.kind != "file": | 841 | if ie.kind != "file": |
338 | 842 | return False | 842 | return False |
339 | 843 | return ie.executable | 843 | return ie.executable |
340 | @@ -864,13 +864,13 @@ | |||
341 | 864 | 864 | ||
342 | 865 | def get_symlink_target(self, path, file_id=None): | 865 | def get_symlink_target(self, path, file_id=None): |
343 | 866 | inv, inv_file_id = self._path2inv_file_id(path, file_id) | 866 | inv, inv_file_id = self._path2inv_file_id(path, file_id) |
345 | 867 | ie = inv[inv_file_id] | 867 | ie = inv.get_entry(inv_file_id) |
346 | 868 | # Inventories store symlink targets in unicode | 868 | # Inventories store symlink targets in unicode |
347 | 869 | return ie.symlink_target | 869 | return ie.symlink_target |
348 | 870 | 870 | ||
349 | 871 | def get_reference_revision(self, path, file_id=None): | 871 | def get_reference_revision(self, path, file_id=None): |
350 | 872 | inv, inv_file_id = self._path2inv_file_id(path, file_id) | 872 | inv, inv_file_id = self._path2inv_file_id(path, file_id) |
352 | 873 | return inv[inv_file_id].reference_revision | 873 | return inv.get_entry(inv_file_id).reference_revision |
353 | 874 | 874 | ||
354 | 875 | def get_root_id(self): | 875 | def get_root_id(self): |
355 | 876 | if self.root_inventory.root: | 876 | if self.root_inventory.root: |
356 | @@ -879,7 +879,7 @@ | |||
357 | 879 | def kind(self, path, file_id=None): | 879 | def kind(self, path, file_id=None): |
358 | 880 | inv, inv_file_id = self._path2inv_file_id(path, file_id) | 880 | inv, inv_file_id = self._path2inv_file_id(path, file_id) |
359 | 881 | try: | 881 | try: |
361 | 882 | return inv[inv_file_id].kind | 882 | return inv.get_entry(inv_file_id).kind |
362 | 883 | except errors.NoSuchId: | 883 | except errors.NoSuchId: |
363 | 884 | raise errors.NoSuchFile(path) | 884 | raise errors.NoSuchFile(path) |
364 | 885 | 885 | ||
365 | @@ -888,7 +888,7 @@ | |||
366 | 888 | inv, file_id = self._path2inv_file_id(path) | 888 | inv, file_id = self._path2inv_file_id(path) |
367 | 889 | if file_id is None: | 889 | if file_id is None: |
368 | 890 | return ('missing', None, None, None) | 890 | return ('missing', None, None, None) |
370 | 891 | entry = inv[file_id] | 891 | entry = inv.get_entry(file_id) |
371 | 892 | kind = entry.kind | 892 | kind = entry.kind |
372 | 893 | if kind == 'file': | 893 | if kind == 'file': |
373 | 894 | return (kind, entry.text_size, entry.executable, entry.text_sha1) | 894 | return (kind, entry.text_size, entry.executable, entry.text_sha1) |
374 | @@ -918,7 +918,7 @@ | |||
375 | 918 | else: | 918 | else: |
376 | 919 | relroot = "" | 919 | relroot = "" |
377 | 920 | # FIXME: stash the node in pending | 920 | # FIXME: stash the node in pending |
379 | 921 | entry = inv[currentdir[4]] | 921 | entry = inv.get_entry(currentdir[4]) |
380 | 922 | for name, child in entry.sorted_children(): | 922 | for name, child in entry.sorted_children(): |
381 | 923 | toppath = relroot + name | 923 | toppath = relroot + name |
382 | 924 | dirblock.append((toppath, name, child.kind, None, | 924 | dirblock.append((toppath, name, child.kind, None, |
383 | 925 | 925 | ||
384 | === modified file 'breezy/bzr/vf_repository.py' | |||
385 | --- breezy/bzr/vf_repository.py 2018-03-24 23:49:53 +0000 | |||
386 | +++ breezy/bzr/vf_repository.py 2018-03-25 02:37:06 +0000 | |||
387 | @@ -258,7 +258,7 @@ | |||
388 | 258 | result = (None, path, ie.file_id, ie) | 258 | result = (None, path, ie.file_id, ie) |
389 | 259 | self._basis_delta.append(result) | 259 | self._basis_delta.append(result) |
390 | 260 | return result | 260 | return result |
392 | 261 | elif ie != basis_inv[ie.file_id]: | 261 | elif ie != basis_inv.get_entry(ie.file_id): |
393 | 262 | # common but altered | 262 | # common but altered |
394 | 263 | # TODO: avoid tis id2path call. | 263 | # TODO: avoid tis id2path call. |
395 | 264 | result = (basis_inv.id2path(ie.file_id), path, ie.file_id, ie) | 264 | result = (basis_inv.id2path(ie.file_id), path, ie.file_id, ie) |
396 | @@ -345,7 +345,7 @@ | |||
397 | 345 | continue | 345 | continue |
398 | 346 | if change[2] not in merged_ids: | 346 | if change[2] not in merged_ids: |
399 | 347 | if change[0] is not None: | 347 | if change[0] is not None: |
401 | 348 | basis_entry = basis_inv[change[2]] | 348 | basis_entry = basis_inv.get_entry(change[2]) |
402 | 349 | merged_ids[change[2]] = [ | 349 | merged_ids[change[2]] = [ |
403 | 350 | # basis revid | 350 | # basis revid |
404 | 351 | basis_entry.revision, | 351 | basis_entry.revision, |
405 | @@ -371,7 +371,7 @@ | |||
406 | 371 | for change in iter_changes: | 371 | for change in iter_changes: |
407 | 372 | # This probably looks up in basis_inv way to much. | 372 | # This probably looks up in basis_inv way to much. |
408 | 373 | if change[1][0] is not None: | 373 | if change[1][0] is not None: |
410 | 374 | head_candidate = [basis_inv[change[0]].revision] | 374 | head_candidate = [basis_inv.get_entry(change[0]).revision] |
411 | 375 | else: | 375 | else: |
412 | 376 | head_candidate = [] | 376 | head_candidate = [] |
413 | 377 | changes[change[0]] = change, merged_ids.get(change[0], | 377 | changes[change[0]] = change, merged_ids.get(change[0], |
414 | @@ -391,7 +391,7 @@ | |||
415 | 391 | # changed_content, versioned, parent, name, kind, | 391 | # changed_content, versioned, parent, name, kind, |
416 | 392 | # executable) | 392 | # executable) |
417 | 393 | try: | 393 | try: |
419 | 394 | basis_entry = basis_inv[file_id] | 394 | basis_entry = basis_inv.get_entry(file_id) |
420 | 395 | except errors.NoSuchId: | 395 | except errors.NoSuchId: |
421 | 396 | # a change from basis->some_parents but file_id isn't in basis | 396 | # a change from basis->some_parents but file_id isn't in basis |
422 | 397 | # so was new in the merge, which means it must have changed | 397 | # so was new in the merge, which means it must have changed |
423 | @@ -1330,7 +1330,7 @@ | |||
424 | 1330 | inv = self.revision_tree(parent_id).root_inventory | 1330 | inv = self.revision_tree(parent_id).root_inventory |
425 | 1331 | inventory_cache[parent_id] = inv | 1331 | inventory_cache[parent_id] = inv |
426 | 1332 | try: | 1332 | try: |
428 | 1333 | parent_entry = inv[text_key[0]] | 1333 | parent_entry = inv.get_entry(text_key[0]) |
429 | 1334 | except (KeyError, errors.NoSuchId): | 1334 | except (KeyError, errors.NoSuchId): |
430 | 1335 | parent_entry = None | 1335 | parent_entry = None |
431 | 1336 | if parent_entry is not None: | 1336 | if parent_entry is not None: |
432 | @@ -2610,7 +2610,7 @@ | |||
433 | 2610 | for file_key in list(texts_possibly_new_in_tree): | 2610 | for file_key in list(texts_possibly_new_in_tree): |
434 | 2611 | file_id, file_revision = file_key | 2611 | file_id, file_revision = file_key |
435 | 2612 | try: | 2612 | try: |
437 | 2613 | entry = basis_inv[file_id] | 2613 | entry = basis_inv.get_entry(file_id) |
438 | 2614 | except errors.NoSuchId: | 2614 | except errors.NoSuchId: |
439 | 2615 | continue | 2615 | continue |
440 | 2616 | if entry.revision == file_revision: | 2616 | if entry.revision == file_revision: |
441 | 2617 | 2617 | ||
442 | === modified file 'breezy/bzr/workingtree.py' | |||
443 | --- breezy/bzr/workingtree.py 2018-03-24 17:48:04 +0000 | |||
444 | +++ breezy/bzr/workingtree.py 2018-03-25 02:37:06 +0000 | |||
445 | @@ -331,15 +331,15 @@ | |||
446 | 331 | self._inventory_is_modified = True | 331 | self._inventory_is_modified = True |
447 | 332 | # we preserve the root inventory entry object, but | 332 | # we preserve the root inventory entry object, but |
448 | 333 | # unlinkit from the byid index | 333 | # unlinkit from the byid index |
450 | 334 | del inv._byid[inv.root.file_id] | 334 | inv.delete(inv.root.file_id) |
451 | 335 | inv.root.file_id = file_id | 335 | inv.root.file_id = file_id |
452 | 336 | # and link it into the index with the new changed id. | 336 | # and link it into the index with the new changed id. |
453 | 337 | inv._byid[inv.root.file_id] = inv.root | 337 | inv._byid[inv.root.file_id] = inv.root |
454 | 338 | # and finally update all children to reference the new id. | 338 | # and finally update all children to reference the new id. |
455 | 339 | # XXX: this should be safe to just look at the root.children | 339 | # XXX: this should be safe to just look at the root.children |
456 | 340 | # list, not the WHOLE INVENTORY. | 340 | # list, not the WHOLE INVENTORY. |
459 | 341 | for fid in inv: | 341 | for fid in inv.iter_all_ids(): |
460 | 342 | entry = inv[fid] | 342 | entry = inv.get_entry(fid) |
461 | 343 | if entry.parent_id == orig_root_id: | 343 | if entry.parent_id == orig_root_id: |
462 | 344 | entry.parent_id = inv.root.file_id | 344 | entry.parent_id = inv.root.file_id |
463 | 345 | 345 | ||
464 | @@ -744,7 +744,7 @@ | |||
465 | 744 | # For unversioned files on win32, we just assume they are not | 744 | # For unversioned files on win32, we just assume they are not |
466 | 745 | # executable | 745 | # executable |
467 | 746 | return False | 746 | return False |
469 | 747 | return inv[file_id].executable | 747 | return inv.get_entry(file_id).executable |
470 | 748 | 748 | ||
471 | 749 | def _is_executable_from_path_and_stat_from_stat(self, path, stat_result): | 749 | def _is_executable_from_path_and_stat_from_stat(self, path, stat_result): |
472 | 750 | mode = stat_result.st_mode | 750 | mode = stat_result.st_mode |
473 | @@ -753,7 +753,7 @@ | |||
474 | 753 | def is_executable(self, path, file_id=None): | 753 | def is_executable(self, path, file_id=None): |
475 | 754 | if not self._supports_executable(): | 754 | if not self._supports_executable(): |
476 | 755 | inv, inv_file_id = self._path2inv_file_id(path, file_id) | 755 | inv, inv_file_id = self._path2inv_file_id(path, file_id) |
478 | 756 | return inv[inv_file_id].executable | 756 | return inv.get_entry(inv_file_id).executable |
479 | 757 | else: | 757 | else: |
480 | 758 | mode = os.lstat(self.abspath(path)).st_mode | 758 | mode = os.lstat(self.abspath(path)).st_mode |
481 | 759 | return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode) | 759 | return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode) |
482 | @@ -985,7 +985,7 @@ | |||
483 | 985 | child_inv = inventory.Inventory(root_id=None) | 985 | child_inv = inventory.Inventory(root_id=None) |
484 | 986 | if file_id is None: | 986 | if file_id is None: |
485 | 987 | file_id = self.path2id(sub_path) | 987 | file_id = self.path2id(sub_path) |
487 | 988 | new_root = my_inv[file_id] | 988 | new_root = my_inv.get_entry(file_id) |
488 | 989 | my_inv.remove_recursive_id(file_id) | 989 | my_inv.remove_recursive_id(file_id) |
489 | 990 | new_root.parent_id = None | 990 | new_root.parent_id = None |
490 | 991 | child_inv.add(new_root) | 991 | child_inv.add(new_root) |
491 | @@ -1061,7 +1061,7 @@ | |||
492 | 1061 | # absolute path | 1061 | # absolute path |
493 | 1062 | fap = from_dir_abspath + '/' + f | 1062 | fap = from_dir_abspath + '/' + f |
494 | 1063 | 1063 | ||
496 | 1064 | dir_ie = inv[from_dir_id] | 1064 | dir_ie = inv.get_entry(from_dir_id) |
497 | 1065 | if dir_ie.kind == 'directory': | 1065 | if dir_ie.kind == 'directory': |
498 | 1066 | f_ie = dir_ie.children.get(f) | 1066 | f_ie = dir_ie.children.get(f) |
499 | 1067 | else: | 1067 | else: |
500 | @@ -1178,7 +1178,7 @@ | |||
501 | 1178 | raise errors.BzrMoveFailedError( | 1178 | raise errors.BzrMoveFailedError( |
502 | 1179 | '', to_dir, errors.NotVersionedError(path=to_dir)) | 1179 | '', to_dir, errors.NotVersionedError(path=to_dir)) |
503 | 1180 | 1180 | ||
505 | 1181 | to_dir_ie = to_inv[to_dir_id] | 1181 | to_dir_ie = to_inv.get_entry(to_dir_id) |
506 | 1182 | if to_dir_ie.kind != 'directory': | 1182 | if to_dir_ie.kind != 'directory': |
507 | 1183 | raise errors.BzrMoveFailedError( | 1183 | raise errors.BzrMoveFailedError( |
508 | 1184 | '', to_dir, errors.NotADirectory(to_abs)) | 1184 | '', to_dir, errors.NotADirectory(to_abs)) |
509 | @@ -1191,7 +1191,7 @@ | |||
510 | 1191 | raise errors.BzrMoveFailedError(from_rel, to_dir, | 1191 | raise errors.BzrMoveFailedError(from_rel, to_dir, |
511 | 1192 | errors.NotVersionedError(path=from_rel)) | 1192 | errors.NotVersionedError(path=from_rel)) |
512 | 1193 | 1193 | ||
514 | 1194 | from_entry = from_inv[from_id] | 1194 | from_entry = from_inv.get_entry(from_id) |
515 | 1195 | from_parent_id = from_entry.parent_id | 1195 | from_parent_id = from_entry.parent_id |
516 | 1196 | to_rel = osutils.pathjoin(to_dir, from_tail) | 1196 | to_rel = osutils.pathjoin(to_dir, from_tail) |
517 | 1197 | rename_entry = InventoryWorkingTree._RenameEntry( | 1197 | rename_entry = InventoryWorkingTree._RenameEntry( |
518 | @@ -1258,11 +1258,11 @@ | |||
519 | 1258 | from_rel, to_rel, | 1258 | from_rel, to_rel, |
520 | 1259 | errors.NotVersionedError(path=from_rel)) | 1259 | errors.NotVersionedError(path=from_rel)) |
521 | 1260 | # put entry back in the inventory so we can rename it | 1260 | # put entry back in the inventory so we can rename it |
523 | 1261 | from_entry = basis_tree.root_inventory[from_id].copy() | 1261 | from_entry = basis_tree.root_inventory.get_entry(from_id).copy() |
524 | 1262 | from_inv.add(from_entry) | 1262 | from_inv.add(from_entry) |
525 | 1263 | else: | 1263 | else: |
526 | 1264 | from_inv, from_inv_id = self._unpack_file_id(from_id) | 1264 | from_inv, from_inv_id = self._unpack_file_id(from_id) |
528 | 1265 | from_entry = from_inv[from_inv_id] | 1265 | from_entry = from_inv.get_entry(from_inv_id) |
529 | 1266 | from_parent_id = from_entry.parent_id | 1266 | from_parent_id = from_entry.parent_id |
530 | 1267 | to_dir, to_tail = os.path.split(to_rel) | 1267 | to_dir, to_tail = os.path.split(to_rel) |
531 | 1268 | to_inv, to_dir_id = self._path2inv_file_id(to_dir) | 1268 | to_inv, to_dir_id = self._path2inv_file_id(to_dir) |
532 | @@ -1464,7 +1464,7 @@ | |||
533 | 1464 | inv, inv_file_id = self._path2inv_file_id(path, file_id) | 1464 | inv, inv_file_id = self._path2inv_file_id(path, file_id) |
534 | 1465 | if inv_file_id is None: | 1465 | if inv_file_id is None: |
535 | 1466 | raise errors.NoSuchFile(self, path) | 1466 | raise errors.NoSuchFile(self, path) |
537 | 1467 | return inv[inv_file_id].kind | 1467 | return inv.get_entry(inv_file_id).kind |
538 | 1468 | 1468 | ||
539 | 1469 | def extras(self): | 1469 | def extras(self): |
540 | 1470 | """Yield all unversioned files in this WorkingTree. | 1470 | """Yield all unversioned files in this WorkingTree. |
541 | @@ -1658,7 +1658,7 @@ | |||
542 | 1658 | else: | 1658 | else: |
543 | 1659 | relroot = "" | 1659 | relroot = "" |
544 | 1660 | # FIXME: stash the node in pending | 1660 | # FIXME: stash the node in pending |
546 | 1661 | entry = inv[top_id] | 1661 | entry = inv.get_entry(top_id) |
547 | 1662 | if entry.kind == 'directory': | 1662 | if entry.kind == 'directory': |
548 | 1663 | for name, child in entry.sorted_children(): | 1663 | for name, child in entry.sorted_children(): |
549 | 1664 | dirblock.append((relroot + name, name, child.kind, None, | 1664 | dirblock.append((relroot + name, name, child.kind, None, |
550 | 1665 | 1665 | ||
551 | === modified file 'breezy/bzr/workingtree_4.py' | |||
552 | --- breezy/bzr/workingtree_4.py 2018-03-24 17:48:04 +0000 | |||
553 | +++ breezy/bzr/workingtree_4.py 2018-03-25 02:37:06 +0000 | |||
554 | @@ -693,7 +693,7 @@ | |||
555 | 693 | update_inventory = True | 693 | update_inventory = True |
556 | 694 | inv = self.root_inventory | 694 | inv = self.root_inventory |
557 | 695 | to_dir_id = to_entry[0][2] | 695 | to_dir_id = to_entry[0][2] |
559 | 696 | to_dir_ie = inv[to_dir_id] | 696 | to_dir_ie = inv.get_entry(to_dir_id) |
560 | 697 | else: | 697 | else: |
561 | 698 | update_inventory = False | 698 | update_inventory = False |
562 | 699 | 699 | ||
563 | @@ -782,7 +782,7 @@ | |||
564 | 782 | # to rollback | 782 | # to rollback |
565 | 783 | if update_inventory: | 783 | if update_inventory: |
566 | 784 | # rename the entry | 784 | # rename the entry |
568 | 785 | from_entry = inv[from_id] | 785 | from_entry = inv.get_entry(from_id) |
569 | 786 | current_parent = from_entry.parent_id | 786 | current_parent = from_entry.parent_id |
570 | 787 | inv.rename(from_id, to_dir_id, from_tail) | 787 | inv.rename(from_id, to_dir_id, from_tail) |
571 | 788 | rollbacks.add_cleanup( | 788 | rollbacks.add_cleanup( |
572 | @@ -1885,7 +1885,7 @@ | |||
573 | 1885 | def get_file_revision(self, path, file_id=None): | 1885 | def get_file_revision(self, path, file_id=None): |
574 | 1886 | with self.lock_read(): | 1886 | with self.lock_read(): |
575 | 1887 | inv, inv_file_id = self._path2inv_file_id(path, file_id) | 1887 | inv, inv_file_id = self._path2inv_file_id(path, file_id) |
577 | 1888 | return inv[inv_file_id].revision | 1888 | return inv.get_entry(inv_file_id).revision |
578 | 1889 | 1889 | ||
579 | 1890 | def get_file(self, path, file_id=None): | 1890 | def get_file(self, path, file_id=None): |
580 | 1891 | return BytesIO(self.get_file_text(path, file_id)) | 1891 | return BytesIO(self.get_file_text(path, file_id)) |
581 | @@ -1893,7 +1893,7 @@ | |||
582 | 1893 | def get_file_size(self, path, file_id=None): | 1893 | def get_file_size(self, path, file_id=None): |
583 | 1894 | """See Tree.get_file_size""" | 1894 | """See Tree.get_file_size""" |
584 | 1895 | inv, inv_file_id = self._path2inv_file_id(path, file_id) | 1895 | inv, inv_file_id = self._path2inv_file_id(path, file_id) |
586 | 1896 | return inv[inv_file_id].text_size | 1896 | return inv.get_entry(inv_file_id).text_size |
587 | 1897 | 1897 | ||
588 | 1898 | def get_file_text(self, path, file_id=None): | 1898 | def get_file_text(self, path, file_id=None): |
589 | 1899 | content = None | 1899 | content = None |
590 | @@ -1911,7 +1911,7 @@ | |||
591 | 1911 | 1911 | ||
592 | 1912 | def get_reference_revision(self, path, file_id=None): | 1912 | def get_reference_revision(self, path, file_id=None): |
593 | 1913 | inv, inv_file_id = self._path2inv_file_id(path, file_id) | 1913 | inv, inv_file_id = self._path2inv_file_id(path, file_id) |
595 | 1914 | return inv[inv_file_id].reference_revision | 1914 | return inv.get_entry(inv_file_id).reference_revision |
596 | 1915 | 1915 | ||
597 | 1916 | def iter_files_bytes(self, desired_files): | 1916 | def iter_files_bytes(self, desired_files): |
598 | 1917 | """See Tree.iter_files_bytes. | 1917 | """See Tree.iter_files_bytes. |
599 | @@ -1976,7 +1976,7 @@ | |||
600 | 1976 | inv, inv_file_id = self._path2inv_file_id(path) | 1976 | inv, inv_file_id = self._path2inv_file_id(path) |
601 | 1977 | if inv_file_id is None: | 1977 | if inv_file_id is None: |
602 | 1978 | return ('missing', None, None, None) | 1978 | return ('missing', None, None, None) |
604 | 1979 | entry = inv[inv_file_id] | 1979 | entry = inv.get_entry(inv_file_id) |
605 | 1980 | kind = entry.kind | 1980 | kind = entry.kind |
606 | 1981 | if kind == 'file': | 1981 | if kind == 'file': |
607 | 1982 | return (kind, entry.text_size, entry.executable, entry.text_sha1) | 1982 | return (kind, entry.text_size, entry.executable, entry.text_sha1) |
608 | @@ -1987,7 +1987,7 @@ | |||
609 | 1987 | 1987 | ||
610 | 1988 | def is_executable(self, path, file_id=None): | 1988 | def is_executable(self, path, file_id=None): |
611 | 1989 | inv, inv_file_id = self._path2inv_file_id(path, file_id) | 1989 | inv, inv_file_id = self._path2inv_file_id(path, file_id) |
613 | 1990 | ie = inv[inv_file_id] | 1990 | ie = inv.get_entry(inv_file_id) |
614 | 1991 | if ie.kind != "file": | 1991 | if ie.kind != "file": |
615 | 1992 | return False | 1992 | return False |
616 | 1993 | return ie.executable | 1993 | return ie.executable |
617 | @@ -2081,7 +2081,7 @@ | |||
618 | 2081 | else: | 2081 | else: |
619 | 2082 | relroot = "" | 2082 | relroot = "" |
620 | 2083 | # FIXME: stash the node in pending | 2083 | # FIXME: stash the node in pending |
622 | 2084 | entry = inv[file_id] | 2084 | entry = inv.get_entry(file_id) |
623 | 2085 | for name, child in entry.sorted_children(): | 2085 | for name, child in entry.sorted_children(): |
624 | 2086 | toppath = relroot + name | 2086 | toppath = relroot + name |
625 | 2087 | dirblock.append((toppath, name, child.kind, None, | 2087 | dirblock.append((toppath, name, child.kind, None, |
626 | 2088 | 2088 | ||
627 | === modified file 'breezy/merge.py' | |||
628 | --- breezy/merge.py 2018-03-24 18:22:08 +0000 | |||
629 | +++ breezy/merge.py 2018-03-25 02:37:06 +0000 | |||
630 | @@ -956,7 +956,7 @@ | |||
631 | 956 | lca_paths.append(path) | 956 | lca_paths.append(path) |
632 | 957 | 957 | ||
633 | 958 | try: | 958 | try: |
635 | 959 | base_ie = base_inventory[file_id] | 959 | base_ie = base_inventory.get_entry(file_id) |
636 | 960 | except errors.NoSuchId: | 960 | except errors.NoSuchId: |
637 | 961 | base_ie = _none_entry | 961 | base_ie = _none_entry |
638 | 962 | base_path = None | 962 | base_path = None |
639 | @@ -964,7 +964,7 @@ | |||
640 | 964 | base_path = self.base_tree.id2path(file_id) | 964 | base_path = self.base_tree.id2path(file_id) |
641 | 965 | 965 | ||
642 | 966 | try: | 966 | try: |
644 | 967 | this_ie = this_inventory[file_id] | 967 | this_ie = this_inventory.get_entry(file_id) |
645 | 968 | except errors.NoSuchId: | 968 | except errors.NoSuchId: |
646 | 969 | this_ie = _none_entry | 969 | this_ie = _none_entry |
647 | 970 | this_path = None | 970 | this_path = None |
648 | @@ -1193,7 +1193,7 @@ | |||
649 | 1193 | def merge_names(self, file_id): | 1193 | def merge_names(self, file_id): |
650 | 1194 | def get_entry(tree): | 1194 | def get_entry(tree): |
651 | 1195 | try: | 1195 | try: |
653 | 1196 | return tree.root_inventory[file_id] | 1196 | return tree.root_inventory.get_entry(file_id) |
654 | 1197 | except errors.NoSuchId: | 1197 | except errors.NoSuchId: |
655 | 1198 | return None | 1198 | return None |
656 | 1199 | this_entry = get_entry(self.this_tree) | 1199 | this_entry = get_entry(self.this_tree) |
657 | @@ -1894,7 +1894,7 @@ | |||
658 | 1894 | # XXX: The error would be clearer if it gave the URL of the source | 1894 | # XXX: The error would be clearer if it gave the URL of the source |
659 | 1895 | # branch, but we don't have a reference to that here. | 1895 | # branch, but we don't have a reference to that here. |
660 | 1896 | raise PathNotInTree(self._source_subpath, "Source tree") | 1896 | raise PathNotInTree(self._source_subpath, "Source tree") |
662 | 1897 | subdir = other_inv[subdir_id] | 1897 | subdir = other_inv.get_entry(subdir_id) |
663 | 1898 | parent_in_target = osutils.dirname(self._target_subdir) | 1898 | parent_in_target = osutils.dirname(self._target_subdir) |
664 | 1899 | target_id = self.this_tree.path2id(parent_in_target) | 1899 | target_id = self.this_tree.path2id(parent_in_target) |
665 | 1900 | if target_id is None: | 1900 | if target_id is None: |
666 | 1901 | 1901 | ||
667 | === modified file 'breezy/plugins/fastimport/bzr_commit_handler.py' | |||
668 | --- breezy/plugins/fastimport/bzr_commit_handler.py 2018-03-24 23:49:53 +0000 | |||
669 | +++ breezy/plugins/fastimport/bzr_commit_handler.py 2018-03-25 02:37:06 +0000 | |||
670 | @@ -375,7 +375,7 @@ | |||
671 | 375 | return | 375 | return |
672 | 376 | # Record it | 376 | # Record it |
673 | 377 | try: | 377 | try: |
675 | 378 | old_ie = inv[file_id] | 378 | old_ie = inv.get_entry(file_id) |
676 | 379 | except errors.NoSuchId: | 379 | except errors.NoSuchId: |
677 | 380 | try: | 380 | try: |
678 | 381 | self.record_new(path, ie) | 381 | self.record_new(path, ie) |
679 | @@ -438,7 +438,7 @@ | |||
680 | 438 | raise KeyError | 438 | raise KeyError |
681 | 439 | if file_id is None: | 439 | if file_id is None: |
682 | 440 | raise KeyError | 440 | raise KeyError |
684 | 441 | result = inv[file_id] | 441 | result = inv.get_entry(file_id) |
685 | 442 | # dirname must be a directory for us to return it | 442 | # dirname must be a directory for us to return it |
686 | 443 | if result.kind == 'directory': | 443 | if result.kind == 'directory': |
687 | 444 | self.directory_entries[dirname] = result | 444 | self.directory_entries[dirname] = result |
688 | @@ -459,7 +459,7 @@ | |||
689 | 459 | self.mutter("ignoring delete of %s as not in inventory", path) | 459 | self.mutter("ignoring delete of %s as not in inventory", path) |
690 | 460 | return | 460 | return |
691 | 461 | try: | 461 | try: |
693 | 462 | ie = inv[file_id] | 462 | ie = inv.get_entry(file_id) |
694 | 463 | except errors.NoSuchId: | 463 | except errors.NoSuchId: |
695 | 464 | self.mutter("ignoring delete of %s as not in inventory", path) | 464 | self.mutter("ignoring delete of %s as not in inventory", path) |
696 | 465 | return | 465 | return |
697 | @@ -479,7 +479,7 @@ | |||
698 | 479 | self.warning("ignoring copy of %s to %s - source does not exist", | 479 | self.warning("ignoring copy of %s to %s - source does not exist", |
699 | 480 | src_path, dest_path) | 480 | src_path, dest_path) |
700 | 481 | return | 481 | return |
702 | 482 | ie = inv[file_id] | 482 | ie = inv.get_entry(file_id) |
703 | 483 | kind = ie.kind | 483 | kind = ie.kind |
704 | 484 | if kind == 'file': | 484 | if kind == 'file': |
705 | 485 | if newly_changed: | 485 | if newly_changed: |
706 | @@ -509,11 +509,11 @@ | |||
707 | 509 | "ignoring rename of %s to %s - old path does not exist" % | 509 | "ignoring rename of %s to %s - old path does not exist" % |
708 | 510 | (old_path, new_path)) | 510 | (old_path, new_path)) |
709 | 511 | return | 511 | return |
711 | 512 | ie = inv[file_id] | 512 | ie = inv.get_entry(file_id) |
712 | 513 | rev_id = ie.revision | 513 | rev_id = ie.revision |
713 | 514 | new_file_id = inv.path2id(new_path) | 514 | new_file_id = inv.path2id(new_path) |
714 | 515 | if new_file_id is not None: | 515 | if new_file_id is not None: |
716 | 516 | self.record_delete(new_path, inv[new_file_id]) | 516 | self.record_delete(new_path, inv.get_entry(new_file_id)) |
717 | 517 | self.record_rename(old_path, new_path, file_id, ie) | 517 | self.record_rename(old_path, new_path, file_id, ie) |
718 | 518 | 518 | ||
719 | 519 | # The revision-id for this entry will be/has been updated and | 519 | # The revision-id for this entry will be/has been updated and |
720 | @@ -713,7 +713,7 @@ | |||
721 | 713 | file_id = new_inv.path2id(dir) | 713 | file_id = new_inv.path2id(dir) |
722 | 714 | if file_id is None: | 714 | if file_id is None: |
723 | 715 | continue | 715 | continue |
725 | 716 | ie = new_inv[file_id] | 716 | ie = new_inv.get_entry(file_id) |
726 | 717 | if ie.kind != 'directory': | 717 | if ie.kind != 'directory': |
727 | 718 | continue | 718 | continue |
728 | 719 | if len(ie.children) == 0: | 719 | if len(ie.children) == 0: |
729 | @@ -734,7 +734,7 @@ | |||
730 | 734 | else: | 734 | else: |
731 | 735 | new_inv = inventory.Inventory(revision_id=self.revision_id) | 735 | new_inv = inventory.Inventory(revision_id=self.revision_id) |
732 | 736 | # This is set in the delta so remove it to prevent a duplicate | 736 | # This is set in the delta so remove it to prevent a duplicate |
734 | 737 | del new_inv[inventory.ROOT_ID] | 737 | new_inv.delete(inventory.ROOT_ID) |
735 | 738 | try: | 738 | try: |
736 | 739 | new_inv.apply_delta(delta) | 739 | new_inv.apply_delta(delta) |
737 | 740 | except errors.InconsistentDelta: | 740 | except errors.InconsistentDelta: |
738 | 741 | 741 | ||
739 | === modified file 'breezy/plugins/fastimport/revision_store.py' | |||
740 | --- breezy/plugins/fastimport/revision_store.py 2018-03-10 13:56:26 +0000 | |||
741 | +++ breezy/plugins/fastimport/revision_store.py 2018-03-25 02:37:06 +0000 | |||
742 | @@ -79,7 +79,7 @@ | |||
743 | 79 | except KeyError: | 79 | except KeyError: |
744 | 80 | # The content wasn't shown as 'new'. Just validate this fact | 80 | # The content wasn't shown as 'new'. Just validate this fact |
745 | 81 | assert file_id not in self._new_info_by_id | 81 | assert file_id not in self._new_info_by_id |
747 | 82 | old_ie = self._basis_inv[file_id] | 82 | old_ie = self._basis_inv.get_entry(file_id) |
748 | 83 | old_text_key = (file_id, old_ie.revision) | 83 | old_text_key = (file_id, old_ie.revision) |
749 | 84 | stream = self._repo.texts.get_record_stream([old_text_key], | 84 | stream = self._repo.texts.get_record_stream([old_text_key], |
750 | 85 | 'unordered', True) | 85 | 'unordered', True) |
751 | @@ -91,7 +91,7 @@ | |||
752 | 91 | if file_id in self._new_info_by_id: | 91 | if file_id in self._new_info_by_id: |
753 | 92 | ie = self._new_info_by_id[file_id][1] | 92 | ie = self._new_info_by_id[file_id][1] |
754 | 93 | return ie.symlink_target | 93 | return ie.symlink_target |
756 | 94 | return self._basis_inv[file_id].symlink_target | 94 | return self._basis_inv.get_entry(file_id).symlink_target |
757 | 95 | 95 | ||
758 | 96 | def get_reference_revision(self, path, file_id=None): | 96 | def get_reference_revision(self, path, file_id=None): |
759 | 97 | raise NotImplementedError(_TreeShim.get_reference_revision) | 97 | raise NotImplementedError(_TreeShim.get_reference_revision) |
760 | @@ -114,7 +114,7 @@ | |||
761 | 114 | # Since the *very* common case is that the file already exists, it | 114 | # Since the *very* common case is that the file already exists, it |
762 | 115 | # probably is better to optimize for that | 115 | # probably is better to optimize for that |
763 | 116 | try: | 116 | try: |
765 | 117 | old_ie = basis_inv[file_id] | 117 | old_ie = basis_inv.get_entry(file_id) |
766 | 118 | except errors.NoSuchId: | 118 | except errors.NoSuchId: |
767 | 119 | old_ie = None | 119 | old_ie = None |
768 | 120 | if ie is None: | 120 | if ie is None: |
769 | @@ -281,12 +281,12 @@ | |||
770 | 281 | heads = [] | 281 | heads = [] |
771 | 282 | for inv in self._rev_parent_invs: | 282 | for inv in self._rev_parent_invs: |
772 | 283 | try: | 283 | try: |
774 | 284 | old_rev = inv[ie.file_id].revision | 284 | old_rev = inv.get_entry(ie.file_id).revision |
775 | 285 | except errors.NoSuchId: | 285 | except errors.NoSuchId: |
776 | 286 | pass | 286 | pass |
777 | 287 | else: | 287 | else: |
778 | 288 | if old_rev in head_set: | 288 | if old_rev in head_set: |
780 | 289 | rev_id = inv[ie.file_id].revision | 289 | rev_id = inv.get_entry(ie.file_id).revision |
781 | 290 | heads.append(rev_id) | 290 | heads.append(rev_id) |
782 | 291 | head_set.remove(rev_id) | 291 | head_set.remove(rev_id) |
783 | 292 | 292 | ||
784 | @@ -511,7 +511,7 @@ | |||
785 | 511 | else: | 511 | else: |
786 | 512 | new_inv = inventory.Inventory(revision_id=revision_id) | 512 | new_inv = inventory.Inventory(revision_id=revision_id) |
787 | 513 | # This is set in the delta so remove it to prevent a duplicate | 513 | # This is set in the delta so remove it to prevent a duplicate |
789 | 514 | del new_inv[inventory.ROOT_ID] | 514 | new_inv.delete(inventory.ROOT_ID) |
790 | 515 | new_inv.apply_delta(inv_delta) | 515 | new_inv.apply_delta(inv_delta) |
791 | 516 | validator = self.repo.add_inventory(revision_id, new_inv, parents) | 516 | validator = self.repo.add_inventory(revision_id, new_inv, parents) |
792 | 517 | return validator, new_inv | 517 | return validator, new_inv |
793 | 518 | 518 | ||
794 | === modified file 'breezy/plugins/repodebug/file_refs.py' | |||
795 | --- breezy/plugins/repodebug/file_refs.py 2017-09-07 08:21:29 +0000 | |||
796 | +++ breezy/plugins/repodebug/file_refs.py 2018-03-25 02:37:06 +0000 | |||
797 | @@ -42,7 +42,7 @@ | |||
798 | 42 | # print len(all_invs) | 42 | # print len(all_invs) |
799 | 43 | for inv in repo.iter_inventories(all_invs, 'unordered'): | 43 | for inv in repo.iter_inventories(all_invs, 'unordered'): |
800 | 44 | try: | 44 | try: |
802 | 45 | entry = inv[file_id] | 45 | entry = inv.get_entry(file_id) |
803 | 46 | except errors.NoSuchId: | 46 | except errors.NoSuchId: |
804 | 47 | # This file doesn't even appear in this inv. | 47 | # This file doesn't even appear in this inv. |
805 | 48 | continue | 48 | continue |
806 | 49 | 49 | ||
807 | === modified file 'breezy/tests/per_interrepository/test_fetch.py' | |||
808 | --- breezy/tests/per_interrepository/test_fetch.py 2017-11-24 09:25:13 +0000 | |||
809 | +++ breezy/tests/per_interrepository/test_fetch.py 2018-03-25 02:37:06 +0000 | |||
810 | @@ -498,7 +498,7 @@ | |||
811 | 498 | source.lock_write() | 498 | source.lock_write() |
812 | 499 | self.addCleanup(source.unlock) | 499 | self.addCleanup(source.unlock) |
813 | 500 | source.start_write_group() | 500 | source.start_write_group() |
815 | 501 | inv['id'].revision = 'b' | 501 | inv.get_entry('id').revision = 'b' |
816 | 502 | inv.revision_id = 'b' | 502 | inv.revision_id = 'b' |
817 | 503 | sha1 = source.add_inventory('b', inv, ['a']) | 503 | sha1 = source.add_inventory('b', inv, ['a']) |
818 | 504 | rev = Revision(timestamp=0, | 504 | rev = Revision(timestamp=0, |
819 | 505 | 505 | ||
820 | === modified file 'breezy/tests/per_intertree/test_compare.py' | |||
821 | --- breezy/tests/per_intertree/test_compare.py 2018-03-24 23:49:53 +0000 | |||
822 | +++ breezy/tests/per_intertree/test_compare.py 2018-03-25 02:37:06 +0000 | |||
823 | @@ -590,7 +590,7 @@ | |||
824 | 590 | (from_basename, to_basename), (kind, None), (False, False)) | 590 | (from_basename, to_basename), (kind, None), (False, False)) |
825 | 591 | 591 | ||
826 | 592 | def deleted(self, tree, file_id): | 592 | def deleted(self, tree, file_id): |
828 | 593 | entry = tree.root_inventory[file_id] | 593 | entry = tree.root_inventory.get_entry(file_id) |
829 | 594 | path = tree.id2path(file_id) | 594 | path = tree.id2path(file_id) |
830 | 595 | return (file_id, (path, None), True, (True, False), (entry.parent_id, None), | 595 | return (file_id, (path, None), True, (True, False), (entry.parent_id, None), |
831 | 596 | (entry.name, None), (entry.kind, None), | 596 | (entry.name, None), (entry.kind, None), |
832 | 597 | 597 | ||
833 | === modified file 'breezy/tests/per_inventory/basics.py' | |||
834 | --- breezy/tests/per_inventory/basics.py 2018-02-17 00:06:35 +0000 | |||
835 | +++ breezy/tests/per_inventory/basics.py 2018-03-25 02:37:06 +0000 | |||
836 | @@ -107,7 +107,7 @@ | |||
837 | 107 | (None, "a", "a-id", self.make_file('a-id', 'a', 'tree-root')), | 107 | (None, "a", "a-id", self.make_file('a-id', 'a', 'tree-root')), |
838 | 108 | ], 'new-rev-1') | 108 | ], 'new-rev-1') |
839 | 109 | self.assertEqual('a', inv.id2path('a-id')) | 109 | self.assertEqual('a', inv.id2path('a-id')) |
841 | 110 | a_ie = inv['a-id'] | 110 | a_ie = inv.get_entry('a-id') |
842 | 111 | b_ie = self.make_file(a_ie.file_id, "b", a_ie.parent_id) | 111 | b_ie = self.make_file(a_ie.file_id, "b", a_ie.parent_id) |
843 | 112 | inv = inv.create_by_apply_delta([("a", "b", "a-id", b_ie)], 'new-rev-2') | 112 | inv = inv.create_by_apply_delta([("a", "b", "a-id", b_ie)], 'new-rev-2') |
844 | 113 | self.assertEqual("b", inv.id2path('a-id')) | 113 | self.assertEqual("b", inv.id2path('a-id')) |
845 | @@ -128,7 +128,7 @@ | |||
846 | 128 | inv = self.make_init_inventory() | 128 | inv = self.make_init_inventory() |
847 | 129 | self.assertTrue(inv.is_root('tree-root')) | 129 | self.assertTrue(inv.is_root('tree-root')) |
848 | 130 | self.assertFalse(inv.is_root('booga')) | 130 | self.assertFalse(inv.is_root('booga')) |
850 | 131 | ie = inv['tree-root'].copy() | 131 | ie = inv.get_entry('tree-root').copy() |
851 | 132 | ie.file_id = 'booga' | 132 | ie.file_id = 'booga' |
852 | 133 | inv = inv.create_by_apply_delta([("", None, "tree-root", None), | 133 | inv = inv.create_by_apply_delta([("", None, "tree-root", None), |
853 | 134 | (None, "", "booga", ie)], 'new-rev-2') | 134 | (None, "", "booga", ie)], 'new-rev-2') |
854 | 135 | 135 | ||
855 | === modified file 'breezy/tests/per_repository_reference/test_add_inventory.py' | |||
856 | --- breezy/tests/per_repository_reference/test_add_inventory.py 2017-05-21 18:10:28 +0000 | |||
857 | +++ breezy/tests/per_repository_reference/test_add_inventory.py 2018-03-25 02:37:06 +0000 | |||
858 | @@ -49,7 +49,7 @@ | |||
859 | 49 | repo.lock_read() | 49 | repo.lock_read() |
860 | 50 | self.addCleanup(repo.unlock) | 50 | self.addCleanup(repo.unlock) |
861 | 51 | inv2 = repo.get_inventory(revid) | 51 | inv2 = repo.get_inventory(revid) |
864 | 52 | content1 = dict((file_id, inv[file_id]) for file_id in inv) | 52 | content1 = dict((file_id, inv.get_entry(file_id)) for file_id in inv.iter_all_ids()) |
865 | 53 | content2 = dict((file_id, inv[file_id]) for file_id in inv2) | 53 | content2 = dict((file_id, inv2.get_entry(file_id)) for file_id in inv2.iter_all_ids()) |
866 | 54 | self.assertEqual(content1, content2) | 54 | self.assertEqual(content1, content2) |
867 | 55 | self.assertRaises(errors.NoSuchRevision, base.get_inventory, revid) | 55 | self.assertRaises(errors.NoSuchRevision, base.get_inventory, revid) |
868 | 56 | 56 | ||
869 | === modified file 'breezy/tests/per_repository_vf/test_add_inventory_by_delta.py' | |||
870 | --- breezy/tests/per_repository_vf/test_add_inventory_by_delta.py 2017-05-21 18:10:28 +0000 | |||
871 | +++ breezy/tests/per_repository_vf/test_add_inventory_by_delta.py 2018-03-25 02:37:06 +0000 | |||
872 | @@ -73,9 +73,9 @@ | |||
873 | 73 | for file_id in deletes: | 73 | for file_id in deletes: |
874 | 74 | delta.append((old.id2path(file_id), None, file_id, None)) | 74 | delta.append((old.id2path(file_id), None, file_id, None)) |
875 | 75 | for file_id in adds: | 75 | for file_id in adds: |
877 | 76 | delta.append((None, new.id2path(file_id), file_id, new[file_id])) | 76 | delta.append((None, new.id2path(file_id), file_id, new.get_entry(file_id))) |
878 | 77 | for file_id in common: | 77 | for file_id in common: |
880 | 78 | if old[file_id] != new[file_id]: | 78 | if old.get_entry(file_id) != new.get_entry(file_id): |
881 | 79 | delta.append((old.id2path(file_id), new.id2path(file_id), | 79 | delta.append((old.id2path(file_id), new.id2path(file_id), |
882 | 80 | file_id, new[file_id])) | 80 | file_id, new[file_id])) |
883 | 81 | return delta | 81 | return delta |
884 | 82 | 82 | ||
885 | === modified file 'breezy/tests/per_repository_vf/test_fileid_involved.py' | |||
886 | --- breezy/tests/per_repository_vf/test_fileid_involved.py 2018-03-20 00:30:39 +0000 | |||
887 | +++ breezy/tests/per_repository_vf/test_fileid_involved.py 2018-03-25 02:37:06 +0000 | |||
888 | @@ -51,7 +51,7 @@ | |||
889 | 51 | old_rt = b.repository.revision_tree('A-id') | 51 | old_rt = b.repository.revision_tree('A-id') |
890 | 52 | new_inv = inventory.mutable_inventory_from_tree(old_rt) | 52 | new_inv = inventory.mutable_inventory_from_tree(old_rt) |
891 | 53 | new_inv.revision_id = 'B-id' | 53 | new_inv.revision_id = 'B-id' |
893 | 54 | new_inv['a-file-id'].revision = 'ghost-id' | 54 | new_inv.get_entry('a-file-id').revision = 'ghost-id' |
894 | 55 | new_rev = _mod_revision.Revision('B-id', | 55 | new_rev = _mod_revision.Revision('B-id', |
895 | 56 | timestamp=time.time(), | 56 | timestamp=time.time(), |
896 | 57 | timezone=0, | 57 | timezone=0, |
897 | 58 | 58 | ||
898 | === modified file 'breezy/tests/per_tree/test_tree.py' | |||
899 | --- breezy/tests/per_tree/test_tree.py 2018-03-24 23:49:53 +0000 | |||
900 | +++ breezy/tests/per_tree/test_tree.py 2018-03-25 02:37:06 +0000 | |||
901 | @@ -107,7 +107,7 @@ | |||
902 | 107 | tree = self.create_nested() | 107 | tree = self.create_nested() |
903 | 108 | tree.lock_read() | 108 | tree.lock_read() |
904 | 109 | self.addCleanup(tree.unlock) | 109 | self.addCleanup(tree.unlock) |
906 | 110 | entry = tree.root_inventory['sub-root'] | 110 | entry = tree.root_inventory.get_entry('sub-root') |
907 | 111 | self.assertEqual([(u'subtree', 'sub-root')], | 111 | self.assertEqual([(u'subtree', 'sub-root')], |
908 | 112 | list(tree.iter_references())) | 112 | list(tree.iter_references())) |
909 | 113 | 113 | ||
910 | 114 | 114 | ||
911 | === modified file 'breezy/tests/per_workingtree/test_nested_specifics.py' | |||
912 | --- breezy/tests/per_workingtree/test_nested_specifics.py 2018-03-22 02:21:11 +0000 | |||
913 | +++ breezy/tests/per_workingtree/test_nested_specifics.py 2018-03-25 02:37:06 +0000 | |||
914 | @@ -44,7 +44,7 @@ | |||
915 | 44 | tree.lock_read() | 44 | tree.lock_read() |
916 | 45 | self.addCleanup(tree.unlock) | 45 | self.addCleanup(tree.unlock) |
917 | 46 | self.assertEqual('subtree-revision', | 46 | self.assertEqual('subtree-revision', |
919 | 47 | tree.root_inventory['subtree-id'].reference_revision) | 47 | tree.root_inventory.get_entry('subtree-id').reference_revision) |
920 | 48 | 48 | ||
921 | 49 | def test_extract_while_locked(self): | 49 | def test_extract_while_locked(self): |
922 | 50 | tree = self.make_branch_and_tree('.') | 50 | tree = self.make_branch_and_tree('.') |
923 | 51 | 51 | ||
924 | === modified file 'breezy/tests/per_workingtree/test_parents.py' | |||
925 | --- breezy/tests/per_workingtree/test_parents.py 2018-03-24 13:45:31 +0000 | |||
926 | +++ breezy/tests/per_workingtree/test_parents.py 2018-03-25 02:37:06 +0000 | |||
927 | @@ -419,11 +419,11 @@ | |||
928 | 419 | for file_id in deletes: | 419 | for file_id in deletes: |
929 | 420 | delta.append((old.id2path(file_id), None, file_id, None)) | 420 | delta.append((old.id2path(file_id), None, file_id, None)) |
930 | 421 | for file_id in adds: | 421 | for file_id in adds: |
932 | 422 | delta.append((None, new.id2path(file_id), file_id, new[file_id])) | 422 | delta.append((None, new.id2path(file_id), file_id, new.get_entry(file_id))) |
933 | 423 | for file_id in common: | 423 | for file_id in common: |
935 | 424 | if old[file_id] != new[file_id]: | 424 | if old.get_entry(file_id) != new.get_entry(file_id): |
936 | 425 | delta.append((old.id2path(file_id), new.id2path(file_id), | 425 | delta.append((old.id2path(file_id), new.id2path(file_id), |
938 | 426 | file_id, new[file_id])) | 426 | file_id, new.get_entry(file_id))) |
939 | 427 | return delta | 427 | return delta |
940 | 428 | 428 | ||
941 | 429 | def fake_up_revision(self, tree, revid, shape): | 429 | def fake_up_revision(self, tree, revid, shape): |
942 | @@ -440,7 +440,7 @@ | |||
943 | 440 | def get_file_text(self, path, file_id=None): | 440 | def get_file_text(self, path, file_id=None): |
944 | 441 | if file_id is None: | 441 | if file_id is None: |
945 | 442 | file_id = self.path2id(path) | 442 | file_id = self.path2id(path) |
947 | 443 | ie = self.root_inventory[file_id] | 443 | ie = self.root_inventory.get_entry(file_id) |
948 | 444 | if ie.kind != "file": | 444 | if ie.kind != "file": |
949 | 445 | return "" | 445 | return "" |
950 | 446 | return 'a' * ie.text_size | 446 | return 'a' * ie.text_size |
951 | 447 | 447 | ||
952 | === modified file 'breezy/tests/test_commit.py' | |||
953 | --- breezy/tests/test_commit.py 2018-03-12 00:47:15 +0000 | |||
954 | +++ breezy/tests/test_commit.py 2018-03-25 02:37:06 +0000 | |||
955 | @@ -331,9 +331,9 @@ | |||
956 | 331 | wt.unlock() | 331 | wt.unlock() |
957 | 332 | 332 | ||
958 | 333 | inv = b.repository.get_inventory(r4) | 333 | inv = b.repository.get_inventory(r4) |
962 | 334 | eq(inv['hello-id'].revision, r4) | 334 | eq(inv.get_entry('hello-id').revision, r4) |
963 | 335 | eq(inv['a-id'].revision, r1) | 335 | eq(inv.get_entry('a-id').revision, r1) |
964 | 336 | eq(inv['b-id'].revision, r3) | 336 | eq(inv.get_entry('b-id').revision, r3) |
965 | 337 | 337 | ||
966 | 338 | def test_removed_commit(self): | 338 | def test_removed_commit(self): |
967 | 339 | """Commit with a removed file""" | 339 | """Commit with a removed file""" |
968 | @@ -373,10 +373,10 @@ | |||
969 | 373 | ['dirid', 'file1id', 'file2id']) | 373 | ['dirid', 'file1id', 'file2id']) |
970 | 374 | wt.commit('dir/file1', specific_files=['dir/file1'], rev_id='1') | 374 | wt.commit('dir/file1', specific_files=['dir/file1'], rev_id='1') |
971 | 375 | inv = b.repository.get_inventory('1') | 375 | inv = b.repository.get_inventory('1') |
974 | 376 | self.assertEqual('1', inv['dirid'].revision) | 376 | self.assertEqual('1', inv.get_entry('dirid').revision) |
975 | 377 | self.assertEqual('1', inv['file1id'].revision) | 377 | self.assertEqual('1', inv.get_entry('file1id').revision) |
976 | 378 | # FIXME: This should raise a KeyError I think, rbc20051006 | 378 | # FIXME: This should raise a KeyError I think, rbc20051006 |
978 | 379 | self.assertRaises(BzrError, inv.__getitem__, 'file2id') | 379 | self.assertRaises(BzrError, inv.get_entry, 'file2id') |
979 | 380 | 380 | ||
980 | 381 | def test_strict_commit(self): | 381 | def test_strict_commit(self): |
981 | 382 | """Try and commit with unknown files and strict = True, should fail.""" | 382 | """Try and commit with unknown files and strict = True, should fail.""" |
982 | 383 | 383 | ||
983 | === modified file 'breezy/tests/test_dirstate.py' | |||
984 | --- breezy/tests/test_dirstate.py 2017-11-17 03:06:50 +0000 | |||
985 | +++ breezy/tests/test_dirstate.py 2018-03-25 02:37:06 +0000 | |||
986 | @@ -925,7 +925,7 @@ | |||
987 | 925 | for entry in state._iter_entries(): | 925 | for entry in state._iter_entries(): |
988 | 926 | values.append(entry[0] + entry[1][0][:1]) | 926 | values.append(entry[0] + entry[1][0][:1]) |
989 | 927 | self.assertEqual(expected_result1, values) | 927 | self.assertEqual(expected_result1, values) |
991 | 928 | del inv['b-id'] | 928 | inv.delete('b-id') |
992 | 929 | state.set_state_from_inventory(inv) | 929 | state.set_state_from_inventory(inv) |
993 | 930 | values = [] | 930 | values = [] |
994 | 931 | for entry in state._iter_entries(): | 931 | for entry in state._iter_entries(): |
995 | 932 | 932 | ||
996 | === modified file 'breezy/tests/test_inv.py' | |||
997 | --- breezy/tests/test_inv.py 2018-02-16 19:38:39 +0000 | |||
998 | +++ breezy/tests/test_inv.py 2018-03-25 02:37:06 +0000 | |||
999 | @@ -314,7 +314,7 @@ | |||
1000 | 314 | inv.root.file_id = b'some-new-root' | 314 | inv.root.file_id = b'some-new-root' |
1001 | 315 | ie.name = u'file2' | 315 | ie.name = u'file2' |
1002 | 316 | self.assertEqual(b'some-tree-root', inv2.root.file_id) | 316 | self.assertEqual(b'some-tree-root', inv2.root.file_id) |
1004 | 317 | self.assertEqual(u'hello', inv2[b'hello-id'].name) | 317 | self.assertEqual(u'hello', inv2.get_entry(b'hello-id').name) |
1005 | 318 | 318 | ||
1006 | 319 | def test_copy_empty(self): | 319 | def test_copy_empty(self): |
1007 | 320 | """Make sure an empty inventory can be copied.""" | 320 | """Make sure an empty inventory can be copied.""" |
1008 | @@ -611,7 +611,7 @@ | |||
1009 | 611 | file1.text_sha1 = '' | 611 | file1.text_sha1 = '' |
1010 | 612 | delta = [(None, u'path', 'file-id', file1)] | 612 | delta = [(None, u'path', 'file-id', file1)] |
1011 | 613 | res_inv = self.apply_delta(self, inv, delta, invalid_delta=False) | 613 | res_inv = self.apply_delta(self, inv, delta, invalid_delta=False) |
1013 | 614 | self.assertEqual('file-id', res_inv['file-id'].file_id) | 614 | self.assertEqual('file-id', res_inv.get_entry('file-id').file_id) |
1014 | 615 | 615 | ||
1015 | 616 | def test_remove_file(self): | 616 | def test_remove_file(self): |
1016 | 617 | inv = self.get_empty_inventory() | 617 | inv = self.get_empty_inventory() |
1017 | @@ -940,18 +940,18 @@ | |||
1018 | 940 | inv.revision_id = "revid" | 940 | inv.revision_id = "revid" |
1019 | 941 | inv.root.revision = "rootrev" | 941 | inv.root.revision = "rootrev" |
1020 | 942 | inv.add(InventoryFile("fileid", "file", inv.root.file_id)) | 942 | inv.add(InventoryFile("fileid", "file", inv.root.file_id)) |
1025 | 943 | inv["fileid"].revision = "filerev" | 943 | inv.get_entry("fileid").revision = "filerev" |
1026 | 944 | inv["fileid"].executable = True | 944 | inv.get_entry("fileid").executable = True |
1027 | 945 | inv["fileid"].text_sha1 = "ffff" | 945 | inv.get_entry("fileid").text_sha1 = "ffff" |
1028 | 946 | inv["fileid"].text_size = 1 | 946 | inv.get_entry("fileid").text_size = 1 |
1029 | 947 | chk_bytes = self.get_chk_bytes() | 947 | chk_bytes = self.get_chk_bytes() |
1030 | 948 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) | 948 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) |
1031 | 949 | bytes = ''.join(chk_inv.to_lines()) | 949 | bytes = ''.join(chk_inv.to_lines()) |
1032 | 950 | new_inv = CHKInventory.deserialise(chk_bytes, bytes, ("revid",)) | 950 | new_inv = CHKInventory.deserialise(chk_bytes, bytes, ("revid",)) |
1034 | 951 | root_entry = new_inv[inv.root.file_id] | 951 | root_entry = new_inv.get_entry(inv.root.file_id) |
1035 | 952 | self.assertEqual(None, root_entry._children) | 952 | self.assertEqual(None, root_entry._children) |
1036 | 953 | self.assertEqual({'file'}, set(root_entry.children)) | 953 | self.assertEqual({'file'}, set(root_entry.children)) |
1038 | 954 | file_direct = new_inv["fileid"] | 954 | file_direct = new_inv.get_entry("fileid") |
1039 | 955 | file_found = root_entry.children['file'] | 955 | file_found = root_entry.children['file'] |
1040 | 956 | self.assertEqual(file_direct.kind, file_found.kind) | 956 | self.assertEqual(file_direct.kind, file_found.kind) |
1041 | 957 | self.assertEqual(file_direct.file_id, file_found.file_id) | 957 | self.assertEqual(file_direct.file_id, file_found.file_id) |
1042 | @@ -977,20 +977,20 @@ | |||
1043 | 977 | self.assertEqual(120, p_id_basename._root_node.maximum_size) | 977 | self.assertEqual(120, p_id_basename._root_node.maximum_size) |
1044 | 978 | self.assertEqual(2, p_id_basename._root_node._key_width) | 978 | self.assertEqual(2, p_id_basename._root_node._key_width) |
1045 | 979 | 979 | ||
1047 | 980 | def test___iter__(self): | 980 | def test_iter_all_ids(self): |
1048 | 981 | inv = Inventory() | 981 | inv = Inventory() |
1049 | 982 | inv.revision_id = "revid" | 982 | inv.revision_id = "revid" |
1050 | 983 | inv.root.revision = "rootrev" | 983 | inv.root.revision = "rootrev" |
1051 | 984 | inv.add(InventoryFile("fileid", "file", inv.root.file_id)) | 984 | inv.add(InventoryFile("fileid", "file", inv.root.file_id)) |
1056 | 985 | inv["fileid"].revision = "filerev" | 985 | inv.get_entry("fileid").revision = "filerev" |
1057 | 986 | inv["fileid"].executable = True | 986 | inv.get_entry("fileid").executable = True |
1058 | 987 | inv["fileid"].text_sha1 = "ffff" | 987 | inv.get_entry("fileid").text_sha1 = "ffff" |
1059 | 988 | inv["fileid"].text_size = 1 | 988 | inv.get_entry("fileid").text_size = 1 |
1060 | 989 | chk_bytes = self.get_chk_bytes() | 989 | chk_bytes = self.get_chk_bytes() |
1061 | 990 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) | 990 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) |
1062 | 991 | bytes = ''.join(chk_inv.to_lines()) | 991 | bytes = ''.join(chk_inv.to_lines()) |
1063 | 992 | new_inv = CHKInventory.deserialise(chk_bytes, bytes, ("revid",)) | 992 | new_inv = CHKInventory.deserialise(chk_bytes, bytes, ("revid",)) |
1065 | 993 | fileids = sorted(new_inv.__iter__()) | 993 | fileids = sorted(new_inv.iter_all_ids()) |
1066 | 994 | self.assertEqual([inv.root.file_id, "fileid"], fileids) | 994 | self.assertEqual([inv.root.file_id, "fileid"], fileids) |
1067 | 995 | 995 | ||
1068 | 996 | def test__len__(self): | 996 | def test__len__(self): |
1069 | @@ -998,29 +998,29 @@ | |||
1070 | 998 | inv.revision_id = "revid" | 998 | inv.revision_id = "revid" |
1071 | 999 | inv.root.revision = "rootrev" | 999 | inv.root.revision = "rootrev" |
1072 | 1000 | inv.add(InventoryFile("fileid", "file", inv.root.file_id)) | 1000 | inv.add(InventoryFile("fileid", "file", inv.root.file_id)) |
1077 | 1001 | inv["fileid"].revision = "filerev" | 1001 | inv.get_entry("fileid").revision = "filerev" |
1078 | 1002 | inv["fileid"].executable = True | 1002 | inv.get_entry("fileid").executable = True |
1079 | 1003 | inv["fileid"].text_sha1 = "ffff" | 1003 | inv.get_entry("fileid").text_sha1 = "ffff" |
1080 | 1004 | inv["fileid"].text_size = 1 | 1004 | inv.get_entry("fileid").text_size = 1 |
1081 | 1005 | chk_bytes = self.get_chk_bytes() | 1005 | chk_bytes = self.get_chk_bytes() |
1082 | 1006 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) | 1006 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) |
1083 | 1007 | self.assertEqual(2, len(chk_inv)) | 1007 | self.assertEqual(2, len(chk_inv)) |
1084 | 1008 | 1008 | ||
1086 | 1009 | def test___getitem__(self): | 1009 | def test_get_entry(self): |
1087 | 1010 | inv = Inventory() | 1010 | inv = Inventory() |
1088 | 1011 | inv.revision_id = b"revid" | 1011 | inv.revision_id = b"revid" |
1089 | 1012 | inv.root.revision = b"rootrev" | 1012 | inv.root.revision = b"rootrev" |
1090 | 1013 | inv.add(InventoryFile(b"fileid", u"file", inv.root.file_id)) | 1013 | inv.add(InventoryFile(b"fileid", u"file", inv.root.file_id)) |
1095 | 1014 | inv[b"fileid"].revision = b"filerev" | 1014 | inv.get_entry(b"fileid").revision = b"filerev" |
1096 | 1015 | inv[b"fileid"].executable = True | 1015 | inv.get_entry(b"fileid").executable = True |
1097 | 1016 | inv[b"fileid"].text_sha1 = b"ffff" | 1016 | inv.get_entry(b"fileid").text_sha1 = b"ffff" |
1098 | 1017 | inv[b"fileid"].text_size = 1 | 1017 | inv.get_entry(b"fileid").text_size = 1 |
1099 | 1018 | chk_bytes = self.get_chk_bytes() | 1018 | chk_bytes = self.get_chk_bytes() |
1100 | 1019 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) | 1019 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) |
1101 | 1020 | data = b''.join(chk_inv.to_lines()) | 1020 | data = b''.join(chk_inv.to_lines()) |
1102 | 1021 | new_inv = CHKInventory.deserialise(chk_bytes, data, (b"revid",)) | 1021 | new_inv = CHKInventory.deserialise(chk_bytes, data, (b"revid",)) |
1105 | 1022 | root_entry = new_inv[inv.root.file_id] | 1022 | root_entry = new_inv.get_entry(inv.root.file_id) |
1106 | 1023 | file_entry = new_inv[b"fileid"] | 1023 | file_entry = new_inv.get_entry(b"fileid") |
1107 | 1024 | self.assertEqual("directory", root_entry.kind) | 1024 | self.assertEqual("directory", root_entry.kind) |
1108 | 1025 | self.assertEqual(inv.root.file_id, root_entry.file_id) | 1025 | self.assertEqual(inv.root.file_id, root_entry.file_id) |
1109 | 1026 | self.assertEqual(inv.root.parent_id, root_entry.parent_id) | 1026 | self.assertEqual(inv.root.parent_id, root_entry.parent_id) |
1110 | @@ -1034,17 +1034,17 @@ | |||
1111 | 1034 | self.assertEqual(b"ffff", file_entry.text_sha1) | 1034 | self.assertEqual(b"ffff", file_entry.text_sha1) |
1112 | 1035 | self.assertEqual(1, file_entry.text_size) | 1035 | self.assertEqual(1, file_entry.text_size) |
1113 | 1036 | self.assertEqual(True, file_entry.executable) | 1036 | self.assertEqual(True, file_entry.executable) |
1115 | 1037 | self.assertRaises(errors.NoSuchId, new_inv.__getitem__, 'missing') | 1037 | self.assertRaises(errors.NoSuchId, new_inv.get_entry, 'missing') |
1116 | 1038 | 1038 | ||
1117 | 1039 | def test_has_id_true(self): | 1039 | def test_has_id_true(self): |
1118 | 1040 | inv = Inventory() | 1040 | inv = Inventory() |
1119 | 1041 | inv.revision_id = "revid" | 1041 | inv.revision_id = "revid" |
1120 | 1042 | inv.root.revision = "rootrev" | 1042 | inv.root.revision = "rootrev" |
1121 | 1043 | inv.add(InventoryFile("fileid", "file", inv.root.file_id)) | 1043 | inv.add(InventoryFile("fileid", "file", inv.root.file_id)) |
1126 | 1044 | inv["fileid"].revision = "filerev" | 1044 | inv.get_entry("fileid").revision = "filerev" |
1127 | 1045 | inv["fileid"].executable = True | 1045 | inv.get_entry("fileid").executable = True |
1128 | 1046 | inv["fileid"].text_sha1 = "ffff" | 1046 | inv.get_entry("fileid").text_sha1 = "ffff" |
1129 | 1047 | inv["fileid"].text_size = 1 | 1047 | inv.get_entry("fileid").text_size = 1 |
1130 | 1048 | chk_bytes = self.get_chk_bytes() | 1048 | chk_bytes = self.get_chk_bytes() |
1131 | 1049 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) | 1049 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) |
1132 | 1050 | self.assertTrue(chk_inv.has_id('fileid')) | 1050 | self.assertTrue(chk_inv.has_id('fileid')) |
1133 | @@ -1066,11 +1066,11 @@ | |||
1134 | 1066 | fileentry = InventoryFile("fileid", "file", "dirid") | 1066 | fileentry = InventoryFile("fileid", "file", "dirid") |
1135 | 1067 | inv.add(direntry) | 1067 | inv.add(direntry) |
1136 | 1068 | inv.add(fileentry) | 1068 | inv.add(fileentry) |
1142 | 1069 | inv["fileid"].revision = "filerev" | 1069 | inv.get_entry("fileid").revision = "filerev" |
1143 | 1070 | inv["fileid"].executable = True | 1070 | inv.get_entry("fileid").executable = True |
1144 | 1071 | inv["fileid"].text_sha1 = "ffff" | 1071 | inv.get_entry("fileid").text_sha1 = "ffff" |
1145 | 1072 | inv["fileid"].text_size = 1 | 1072 | inv.get_entry("fileid").text_size = 1 |
1146 | 1073 | inv["dirid"].revision = "filerev" | 1073 | inv.get_entry("dirid").revision = "filerev" |
1147 | 1074 | chk_bytes = self.get_chk_bytes() | 1074 | chk_bytes = self.get_chk_bytes() |
1148 | 1075 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) | 1075 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) |
1149 | 1076 | bytes = ''.join(chk_inv.to_lines()) | 1076 | bytes = ''.join(chk_inv.to_lines()) |
1150 | @@ -1087,11 +1087,11 @@ | |||
1151 | 1087 | fileentry = InventoryFile("fileid", "file", "dirid") | 1087 | fileentry = InventoryFile("fileid", "file", "dirid") |
1152 | 1088 | inv.add(direntry) | 1088 | inv.add(direntry) |
1153 | 1089 | inv.add(fileentry) | 1089 | inv.add(fileentry) |
1159 | 1090 | inv["fileid"].revision = "filerev" | 1090 | inv.get_entry("fileid").revision = "filerev" |
1160 | 1091 | inv["fileid"].executable = True | 1091 | inv.get_entry("fileid").executable = True |
1161 | 1092 | inv["fileid"].text_sha1 = "ffff" | 1092 | inv.get_entry("fileid").text_sha1 = "ffff" |
1162 | 1093 | inv["fileid"].text_size = 1 | 1093 | inv.get_entry("fileid").text_size = 1 |
1163 | 1094 | inv["dirid"].revision = "filerev" | 1094 | inv.get_entry("dirid").revision = "filerev" |
1164 | 1095 | chk_bytes = self.get_chk_bytes() | 1095 | chk_bytes = self.get_chk_bytes() |
1165 | 1096 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) | 1096 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) |
1166 | 1097 | bytes = ''.join(chk_inv.to_lines()) | 1097 | bytes = ''.join(chk_inv.to_lines()) |
1167 | @@ -1172,18 +1172,18 @@ | |||
1168 | 1172 | inv.revision_id = "revid" | 1172 | inv.revision_id = "revid" |
1169 | 1173 | inv.root.revision = "rootrev" | 1173 | inv.root.revision = "rootrev" |
1170 | 1174 | inv.add(InventoryFile("fileid", "file", inv.root.file_id)) | 1174 | inv.add(InventoryFile("fileid", "file", inv.root.file_id)) |
1175 | 1175 | inv["fileid"].revision = "filerev" | 1175 | inv.get_entry("fileid").revision = "filerev" |
1176 | 1176 | inv["fileid"].executable = True | 1176 | inv.get_entry("fileid").executable = True |
1177 | 1177 | inv["fileid"].text_sha1 = "ffff" | 1177 | inv.get_entry("fileid").text_sha1 = "ffff" |
1178 | 1178 | inv["fileid"].text_size = 1 | 1178 | inv.get_entry("fileid").text_size = 1 |
1179 | 1179 | inv2 = Inventory() | 1179 | inv2 = Inventory() |
1180 | 1180 | inv2.revision_id = "revid2" | 1180 | inv2.revision_id = "revid2" |
1181 | 1181 | inv2.root.revision = "rootrev" | 1181 | inv2.root.revision = "rootrev" |
1182 | 1182 | inv2.add(InventoryFile("fileid", "file", inv.root.file_id)) | 1182 | inv2.add(InventoryFile("fileid", "file", inv.root.file_id)) |
1187 | 1183 | inv2["fileid"].revision = "filerev2" | 1183 | inv2.get_entry("fileid").revision = "filerev2" |
1188 | 1184 | inv2["fileid"].executable = False | 1184 | inv2.get_entry("fileid").executable = False |
1189 | 1185 | inv2["fileid"].text_sha1 = "bbbb" | 1185 | inv2.get_entry("fileid").text_sha1 = "bbbb" |
1190 | 1186 | inv2["fileid"].text_size = 2 | 1186 | inv2.get_entry("fileid").text_size = 2 |
1191 | 1187 | # get fresh objects. | 1187 | # get fresh objects. |
1192 | 1188 | chk_bytes = self.get_chk_bytes() | 1188 | chk_bytes = self.get_chk_bytes() |
1193 | 1189 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) | 1189 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) |
1194 | @@ -1202,10 +1202,10 @@ | |||
1195 | 1202 | inv.revision_id = "revid" | 1202 | inv.revision_id = "revid" |
1196 | 1203 | inv.root.revision = "rootrev" | 1203 | inv.root.revision = "rootrev" |
1197 | 1204 | inv.add(InventoryFile("fileid", "file", inv.root.file_id)) | 1204 | inv.add(InventoryFile("fileid", "file", inv.root.file_id)) |
1202 | 1205 | inv["fileid"].revision = "filerev" | 1205 | inv.get_entry("fileid").revision = "filerev" |
1203 | 1206 | inv["fileid"].executable = True | 1206 | inv.get_entry("fileid").executable = True |
1204 | 1207 | inv["fileid"].text_sha1 = "ffff" | 1207 | inv.get_entry("fileid").text_sha1 = "ffff" |
1205 | 1208 | inv["fileid"].text_size = 1 | 1208 | inv.get_entry("fileid").text_size = 1 |
1206 | 1209 | # get fresh objects. | 1209 | # get fresh objects. |
1207 | 1210 | chk_bytes = self.get_chk_bytes() | 1210 | chk_bytes = self.get_chk_bytes() |
1208 | 1211 | tmp_inv = CHKInventory.from_inventory(chk_bytes, inv) | 1211 | tmp_inv = CHKInventory.from_inventory(chk_bytes, inv) |
1209 | @@ -1327,14 +1327,14 @@ | |||
1210 | 1327 | inv.root.revision = "rootrev" | 1327 | inv.root.revision = "rootrev" |
1211 | 1328 | root_id = inv.root.file_id | 1328 | root_id = inv.root.file_id |
1212 | 1329 | inv.add(InventoryFile("fileid", u'f\xefle', root_id)) | 1329 | inv.add(InventoryFile("fileid", u'f\xefle', root_id)) |
1216 | 1330 | inv["fileid"].revision = "filerev" | 1330 | inv.get_entry("fileid").revision = "filerev" |
1217 | 1331 | inv["fileid"].text_sha1 = "ffff" | 1331 | inv.get_entry("fileid").text_sha1 = "ffff" |
1218 | 1332 | inv["fileid"].text_size = 0 | 1332 | inv.get_entry("fileid").text_size = 0 |
1219 | 1333 | inv.add(InventoryDirectory("dirid", u'dir-\N{EURO SIGN}', root_id)) | 1333 | inv.add(InventoryDirectory("dirid", u'dir-\N{EURO SIGN}', root_id)) |
1220 | 1334 | inv.add(InventoryFile("childid", u'ch\xefld', "dirid")) | 1334 | inv.add(InventoryFile("childid", u'ch\xefld', "dirid")) |
1224 | 1335 | inv["childid"].revision = "filerev" | 1335 | inv.get_entry("childid").revision = "filerev" |
1225 | 1336 | inv["childid"].text_sha1 = "ffff" | 1336 | inv.get_entry("childid").text_sha1 = "ffff" |
1226 | 1337 | inv["childid"].text_size = 0 | 1337 | inv.get_entry("childid").text_size = 0 |
1227 | 1338 | chk_bytes = self.get_chk_bytes() | 1338 | chk_bytes = self.get_chk_bytes() |
1228 | 1339 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) | 1339 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) |
1229 | 1340 | bytes = ''.join(chk_inv.to_lines()) | 1340 | bytes = ''.join(chk_inv.to_lines()) |
1230 | @@ -1360,16 +1360,16 @@ | |||
1231 | 1360 | inv.root.revision = "rootrev" | 1360 | inv.root.revision = "rootrev" |
1232 | 1361 | root_id = inv.root.file_id | 1361 | root_id = inv.root.file_id |
1233 | 1362 | inv.add(InventoryFile("fileid", "file", root_id)) | 1362 | inv.add(InventoryFile("fileid", "file", root_id)) |
1238 | 1363 | inv["fileid"].revision = "filerev" | 1363 | inv.get_entry("fileid").revision = "filerev" |
1239 | 1364 | inv["fileid"].executable = True | 1364 | inv.get_entry("fileid").executable = True |
1240 | 1365 | inv["fileid"].text_sha1 = "ffff" | 1365 | inv.get_entry("fileid").text_sha1 = "ffff" |
1241 | 1366 | inv["fileid"].text_size = 1 | 1366 | inv.get_entry("fileid").text_size = 1 |
1242 | 1367 | inv.add(InventoryDirectory("dirid", "dir", root_id)) | 1367 | inv.add(InventoryDirectory("dirid", "dir", root_id)) |
1243 | 1368 | inv.add(InventoryFile("childid", "child", "dirid")) | 1368 | inv.add(InventoryFile("childid", "child", "dirid")) |
1248 | 1369 | inv["childid"].revision = "filerev" | 1369 | inv.get_entry("childid").revision = "filerev" |
1249 | 1370 | inv["childid"].executable = False | 1370 | inv.get_entry("childid").executable = False |
1250 | 1371 | inv["childid"].text_sha1 = "dddd" | 1371 | inv.get_entry("childid").text_sha1 = "dddd" |
1251 | 1372 | inv["childid"].text_size = 1 | 1372 | inv.get_entry("childid").text_size = 1 |
1252 | 1373 | chk_bytes = self.get_chk_bytes() | 1373 | chk_bytes = self.get_chk_bytes() |
1253 | 1374 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) | 1374 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) |
1254 | 1375 | bytes = ''.join(chk_inv.to_lines()) | 1375 | bytes = ''.join(chk_inv.to_lines()) |
1255 | @@ -1388,7 +1388,7 @@ | |||
1256 | 1388 | 1388 | ||
1257 | 1389 | def test__preload_handles_partially_evaluated_inventory(self): | 1389 | def test__preload_handles_partially_evaluated_inventory(self): |
1258 | 1390 | new_inv = self.make_basic_utf8_inventory() | 1390 | new_inv = self.make_basic_utf8_inventory() |
1260 | 1391 | ie = new_inv[new_inv.root_id] | 1391 | ie = new_inv.get_entry(new_inv.root_id) |
1261 | 1392 | self.assertIs(None, ie._children) | 1392 | self.assertIs(None, ie._children) |
1262 | 1393 | self.assertEqual([u'dir-\N{EURO SIGN}', u'f\xefle'], | 1393 | self.assertEqual([u'dir-\N{EURO SIGN}', u'f\xefle'], |
1263 | 1394 | sorted(ie.children.keys())) | 1394 | sorted(ie.children.keys())) |
1264 | @@ -1399,7 +1399,7 @@ | |||
1265 | 1399 | # No change | 1399 | # No change |
1266 | 1400 | self.assertEqual([u'dir-\N{EURO SIGN}', u'f\xefle'], | 1400 | self.assertEqual([u'dir-\N{EURO SIGN}', u'f\xefle'], |
1267 | 1401 | sorted(ie._children.keys())) | 1401 | sorted(ie._children.keys())) |
1269 | 1402 | ie_dir = new_inv["dirid"] | 1402 | ie_dir = new_inv.get_entry("dirid") |
1270 | 1403 | self.assertEqual([u'ch\xefld'], | 1403 | self.assertEqual([u'ch\xefld'], |
1271 | 1404 | sorted(ie_dir._children.keys())) | 1404 | sorted(ie_dir._children.keys())) |
1272 | 1405 | 1405 | ||
1273 | @@ -1574,9 +1574,9 @@ | |||
1274 | 1574 | inv = mutable_inventory_from_tree(tree) | 1574 | inv = mutable_inventory_from_tree(tree) |
1275 | 1575 | self.assertEqual(revid, inv.revision_id) | 1575 | self.assertEqual(revid, inv.revision_id) |
1276 | 1576 | self.assertEqual(2, len(inv)) | 1576 | self.assertEqual(2, len(inv)) |
1278 | 1577 | self.assertEqual("a", inv['thefileid'].name) | 1577 | self.assertEqual("a", inv.get_entry('thefileid').name) |
1279 | 1578 | # The inventory should be mutable and independent of | 1578 | # The inventory should be mutable and independent of |
1280 | 1579 | # the original tree | 1579 | # the original tree |
1284 | 1580 | self.assertFalse(tree.root_inventory['thefileid'].executable) | 1580 | self.assertFalse(tree.root_inventory.get_entry('thefileid').executable) |
1285 | 1581 | inv['thefileid'].executable = True | 1581 | inv.get_entry('thefileid').executable = True |
1286 | 1582 | self.assertFalse(tree.root_inventory['thefileid'].executable) | 1582 | self.assertFalse(tree.root_inventory.get_entry('thefileid').executable) |
1287 | 1583 | 1583 | ||
1288 | === modified file 'breezy/tests/test_revisiontree.py' | |||
1289 | --- breezy/tests/test_revisiontree.py 2017-11-24 09:25:13 +0000 | |||
1290 | +++ breezy/tests/test_revisiontree.py 2018-03-25 02:37:06 +0000 | |||
1291 | @@ -76,6 +76,6 @@ | |||
1292 | 76 | 76 | ||
1293 | 77 | def test_get_file_mtime_ghost(self): | 77 | def test_get_file_mtime_ghost(self): |
1294 | 78 | path = next(iter(self.rev_tree.all_versioned_paths())) | 78 | path = next(iter(self.rev_tree.all_versioned_paths())) |
1296 | 79 | self.rev_tree.root_inventory[self.rev_tree.path2id(path)].revision = 'ghostrev' | 79 | self.rev_tree.root_inventory.get_entry(self.rev_tree.path2id(path)).revision = 'ghostrev' |
1297 | 80 | self.assertRaises(FileTimestampUnavailable, | 80 | self.assertRaises(FileTimestampUnavailable, |
1298 | 81 | self.rev_tree.get_file_mtime, path) | 81 | self.rev_tree.get_file_mtime, path) |
1299 | 82 | 82 | ||
1300 | === modified file 'breezy/tests/test_transform.py' | |||
1301 | --- breezy/tests/test_transform.py 2018-03-24 02:22:27 +0000 | |||
1302 | +++ breezy/tests/test_transform.py 2018-03-25 02:37:06 +0000 | |||
1303 | @@ -400,7 +400,7 @@ | |||
1304 | 400 | tree.lock_read() | 400 | tree.lock_read() |
1305 | 401 | self.addCleanup(tree.unlock) | 401 | self.addCleanup(tree.unlock) |
1306 | 402 | self.assertEqual('subtree-revision', | 402 | self.assertEqual('subtree-revision', |
1308 | 403 | tree.root_inventory['subtree-id'].reference_revision) | 403 | tree.root_inventory.get_entry('subtree-id').reference_revision) |
1309 | 404 | 404 | ||
1310 | 405 | def test_conflicts(self): | 405 | def test_conflicts(self): |
1311 | 406 | transform, root = self.get_transform() | 406 | transform, root = self.get_transform() |
1312 | 407 | 407 | ||
1313 | === modified file 'breezy/tests/test_xml.py' | |||
1314 | --- breezy/tests/test_xml.py 2018-02-17 02:02:38 +0000 | |||
1315 | +++ breezy/tests/test_xml.py 2018-03-25 02:37:06 +0000 | |||
1316 | @@ -227,11 +227,11 @@ | |||
1317 | 227 | inv = breezy.bzr.xml5.serializer_v5.read_inventory(inp) | 227 | inv = breezy.bzr.xml5.serializer_v5.read_inventory(inp) |
1318 | 228 | eq = self.assertEqual | 228 | eq = self.assertEqual |
1319 | 229 | eq(len(inv), 4) | 229 | eq(len(inv), 4) |
1321 | 230 | ie = inv['bar-20050824000535-6bc48cfad47ed134'] | 230 | ie = inv.get_entry('bar-20050824000535-6bc48cfad47ed134') |
1322 | 231 | eq(ie.kind, 'file') | 231 | eq(ie.kind, 'file') |
1323 | 232 | eq(ie.revision, 'mbp@foo-00') | 232 | eq(ie.revision, 'mbp@foo-00') |
1324 | 233 | eq(ie.name, 'bar') | 233 | eq(ie.name, 'bar') |
1326 | 234 | eq(inv[ie.parent_id].kind, 'directory') | 234 | eq(inv.get_entry(ie.parent_id).kind, 'directory') |
1327 | 235 | 235 | ||
1328 | 236 | def test_unpack_basis_inventory_5(self): | 236 | def test_unpack_basis_inventory_5(self): |
1329 | 237 | """Unpack canned new-style inventory""" | 237 | """Unpack canned new-style inventory""" |
1330 | @@ -240,11 +240,11 @@ | |||
1331 | 240 | eq = self.assertEqual | 240 | eq = self.assertEqual |
1332 | 241 | eq(len(inv), 4) | 241 | eq(len(inv), 4) |
1333 | 242 | eq(inv.revision_id, 'mbp@sourcefrog.net-20050905063503-43948f59fa127d92') | 242 | eq(inv.revision_id, 'mbp@sourcefrog.net-20050905063503-43948f59fa127d92') |
1335 | 243 | ie = inv['bar-20050824000535-6bc48cfad47ed134'] | 243 | ie = inv.get_entry('bar-20050824000535-6bc48cfad47ed134') |
1336 | 244 | eq(ie.kind, 'file') | 244 | eq(ie.kind, 'file') |
1337 | 245 | eq(ie.revision, 'mbp@foo-00') | 245 | eq(ie.revision, 'mbp@foo-00') |
1338 | 246 | eq(ie.name, 'bar') | 246 | eq(ie.name, 'bar') |
1340 | 247 | eq(inv[ie.parent_id].kind, 'directory') | 247 | eq(inv.get_entry(ie.parent_id).kind, 'directory') |
1341 | 248 | 248 | ||
1342 | 249 | def test_unpack_inventory_5a(self): | 249 | def test_unpack_inventory_5a(self): |
1343 | 250 | inv = breezy.bzr.xml5.serializer_v5.read_inventory_from_string( | 250 | inv = breezy.bzr.xml5.serializer_v5.read_inventory_from_string( |
1344 | @@ -357,13 +357,13 @@ | |||
1345 | 357 | inv.add(inventory.InventoryDirectory('dir-id', 'dir', | 357 | inv.add(inventory.InventoryDirectory('dir-id', 'dir', |
1346 | 358 | 'tree-root-321')) | 358 | 'tree-root-321')) |
1347 | 359 | inv.add(inventory.InventoryLink('link-id', 'link', 'tree-root-321')) | 359 | inv.add(inventory.InventoryLink('link-id', 'link', 'tree-root-321')) |
1355 | 360 | inv['tree-root-321'].revision = 'rev_outer' | 360 | inv.get_entry('tree-root-321').revision = 'rev_outer' |
1356 | 361 | inv['dir-id'].revision = 'rev_outer' | 361 | inv.get_entry('dir-id').revision = 'rev_outer' |
1357 | 362 | inv['file-id'].revision = 'rev_outer' | 362 | inv.get_entry('file-id').revision = 'rev_outer' |
1358 | 363 | inv['file-id'].text_sha1 = 'A' | 363 | inv.get_entry('file-id').text_sha1 = 'A' |
1359 | 364 | inv['file-id'].text_size = 1 | 364 | inv.get_entry('file-id').text_size = 1 |
1360 | 365 | inv['link-id'].revision = 'rev_outer' | 365 | inv.get_entry('link-id').revision = 'rev_outer' |
1361 | 366 | inv['link-id'].symlink_target = 'a' | 366 | inv.get_entry('link-id').symlink_target = 'a' |
1362 | 367 | return inv | 367 | return inv |
1363 | 368 | 368 | ||
1364 | 369 | def test_roundtrip_inventory_v7(self): | 369 | def test_roundtrip_inventory_v7(self): |
1365 | @@ -377,7 +377,7 @@ | |||
1366 | 377 | inv2 = xml7.serializer_v7.read_inventory_from_string(txt) | 377 | inv2 = xml7.serializer_v7.read_inventory_from_string(txt) |
1367 | 378 | self.assertEqual(5, len(inv2)) | 378 | self.assertEqual(5, len(inv2)) |
1368 | 379 | for path, ie in inv.iter_entries(): | 379 | for path, ie in inv.iter_entries(): |
1370 | 380 | self.assertEqual(ie, inv2[ie.file_id]) | 380 | self.assertEqual(ie, inv2.get_entry(ie.file_id)) |
1371 | 381 | 381 | ||
1372 | 382 | def test_roundtrip_inventory_v6(self): | 382 | def test_roundtrip_inventory_v6(self): |
1373 | 383 | inv = self.get_sample_inventory() | 383 | inv = self.get_sample_inventory() |
1374 | @@ -388,7 +388,7 @@ | |||
1375 | 388 | inv2 = xml6.serializer_v6.read_inventory_from_string(txt) | 388 | inv2 = xml6.serializer_v6.read_inventory_from_string(txt) |
1376 | 389 | self.assertEqual(4, len(inv2)) | 389 | self.assertEqual(4, len(inv2)) |
1377 | 390 | for path, ie in inv.iter_entries(): | 390 | for path, ie in inv.iter_entries(): |
1379 | 391 | self.assertEqual(ie, inv2[ie.file_id]) | 391 | self.assertEqual(ie, inv2.get_entry(ie.file_id)) |
1380 | 392 | 392 | ||
1381 | 393 | def test_wrong_format_v7(self): | 393 | def test_wrong_format_v7(self): |
1382 | 394 | """Can't accidentally open a file with wrong serializer""" | 394 | """Can't accidentally open a file with wrong serializer""" |
1383 | @@ -415,9 +415,9 @@ | |||
1384 | 415 | lines = s_v7.write_inventory_to_lines(inv) | 415 | lines = s_v7.write_inventory_to_lines(inv) |
1385 | 416 | self.assertEqual(breezy.osutils.split_lines(txt), lines) | 416 | self.assertEqual(breezy.osutils.split_lines(txt), lines) |
1386 | 417 | inv2 = s_v7.read_inventory_from_string(txt) | 417 | inv2 = s_v7.read_inventory_from_string(txt) |
1390 | 418 | self.assertEqual('tree-root-321', inv2['nested-id'].parent_id) | 418 | self.assertEqual('tree-root-321', inv2.get_entry('nested-id').parent_id) |
1391 | 419 | self.assertEqual('rev-outer', inv2['nested-id'].revision) | 419 | self.assertEqual('rev-outer', inv2.get_entry('nested-id').revision) |
1392 | 420 | self.assertEqual('rev-inner', inv2['nested-id'].reference_revision) | 420 | self.assertEqual('rev-inner', inv2.get_entry('nested-id').reference_revision) |
1393 | 421 | 421 | ||
1394 | 422 | def test_roundtrip_inventory_v8(self): | 422 | def test_roundtrip_inventory_v8(self): |
1395 | 423 | inv = self.get_sample_inventory() | 423 | inv = self.get_sample_inventory() |
1396 | @@ -425,7 +425,7 @@ | |||
1397 | 425 | inv2 = xml8.serializer_v8.read_inventory_from_string(txt) | 425 | inv2 = xml8.serializer_v8.read_inventory_from_string(txt) |
1398 | 426 | self.assertEqual(4, len(inv2)) | 426 | self.assertEqual(4, len(inv2)) |
1399 | 427 | for path, ie in inv.iter_entries(): | 427 | for path, ie in inv.iter_entries(): |
1401 | 428 | self.assertEqual(ie, inv2[ie.file_id]) | 428 | self.assertEqual(ie, inv2.get_entry(ie.file_id)) |
1402 | 429 | 429 | ||
1403 | 430 | def test_inventory_text_v8(self): | 430 | def test_inventory_text_v8(self): |
1404 | 431 | inv = self.get_sample_inventory() | 431 | inv = self.get_sample_inventory() |
1405 | 432 | 432 | ||
1406 | === modified file 'breezy/tree.py' | |||
1407 | --- breezy/tree.py 2018-03-24 10:24:48 +0000 | |||
1408 | +++ breezy/tree.py 2018-03-25 02:37:06 +0000 | |||
1409 | @@ -1186,7 +1186,7 @@ | |||
1410 | 1186 | return (None, None) | 1186 | return (None, None) |
1411 | 1187 | else: | 1187 | else: |
1412 | 1188 | self._out_of_order_processed.add(file_id) | 1188 | self._out_of_order_processed.add(file_id) |
1414 | 1189 | cur_ie = other_tree.root_inventory[file_id] | 1189 | cur_ie = other_tree.root_inventory.get_entry(file_id) |
1415 | 1190 | return (cur_path, cur_ie) | 1190 | return (cur_path, cur_ie) |
1416 | 1191 | 1191 | ||
1417 | 1192 | def iter_all(self): | 1192 | def iter_all(self): |
Thanks. Couple of inline notes.