Merge ~racb/git-ubuntu:prepare-upload-adjustments into git-ubuntu:master
- Git
- lp:~racb/git-ubuntu
- prepare-upload-adjustments
- Merge into master
Status: | Superseded | ||||||||
---|---|---|---|---|---|---|---|---|---|
Proposed branch: | ~racb/git-ubuntu:prepare-upload-adjustments | ||||||||
Merge into: | git-ubuntu:master | ||||||||
Diff against target: |
19208 lines (+18908/-0) (has conflicts) 50 files modified
doc/README.md (+117/-0) doc/SPECIFICATION (+167/-0) doc/release-process.md (+264/-0) gitubuntu/changelog_date_overrides.txt (+19/-0) gitubuntu/changelog_tests/maintainer_name_inner_space (+8/-0) gitubuntu/changelog_tests/maintainer_name_leading_space (+8/-0) gitubuntu/changelog_tests/maintainer_name_trailing_space (+8/-0) gitubuntu/changelog_tests/test_date_1 (+8/-0) gitubuntu/changelog_tests/test_date_2 (+8/-0) gitubuntu/changelog_tests/test_distribution (+8/-0) gitubuntu/changelog_tests/test_distribution_source_1 (+8/-0) gitubuntu/changelog_tests/test_distribution_source_2 (+8/-0) gitubuntu/changelog_tests/test_distribution_source_3 (+8/-0) gitubuntu/changelog_tests/test_distribution_source_4 (+8/-0) gitubuntu/changelog_tests/test_maintainer_1 (+8/-0) gitubuntu/changelog_tests/test_maintainer_2 (+8/-0) gitubuntu/changelog_tests/test_maintainer_3 (+8/-0) gitubuntu/changelog_tests/test_versions_1 (+8/-0) gitubuntu/changelog_tests/test_versions_2 (+14/-0) gitubuntu/changelog_tests/test_versions_3 (+26/-0) gitubuntu/clone.py (+178/-0) gitubuntu/git_repository.py (+3026/-0) gitubuntu/git_repository_test.py (+1191/-0) gitubuntu/importer.py (+2703/-0) gitubuntu/importer_service.py (+916/-0) gitubuntu/importer_service_broker.py (+178/-0) gitubuntu/importer_service_poller.py (+239/-0) gitubuntu/importer_service_poller_test.py (+66/-0) gitubuntu/importer_service_worker.py (+311/-0) gitubuntu/importer_test.py (+2288/-0) gitubuntu/prepare_upload.py (+215/-0) gitubuntu/prepare_upload_test.py (+268/-0) gitubuntu/repo_builder.py (+450/-0) gitubuntu/scriptutils.py (+226/-0) gitubuntu/source-package-allowlist.txt (+2881/-0) gitubuntu/source-package-denylist.txt (+56/-0) gitubuntu/source_builder.py (+344/-0) gitubuntu/source_information.py (+785/-0) gitubuntu/source_information_test.py (+503/-0) gitubuntu/submit.py (+252/-0) man/man1/git-ubuntu-clone.1 (+68/-0) man/man1/git-ubuntu-export-orig.1 (+63/-0) man/man1/git-ubuntu-import.1 (+224/-0) man/man1/git-ubuntu-merge.1 (+134/-0) 
man/man1/git-ubuntu-queue.1 (+96/-0) man/man1/git-ubuntu-remote.1 (+86/-0) man/man1/git-ubuntu-submit.1 (+97/-0) man/man1/git-ubuntu-tag.1 (+88/-0) man/man1/git-ubuntu.1 (+217/-0) setup.py (+40/-0) Conflict in doc/README.md Conflict in doc/SPECIFICATION Conflict in doc/release-process.md Conflict in gitubuntu/changelog_date_overrides.txt Conflict in gitubuntu/changelog_tests/maintainer_name_inner_space Conflict in gitubuntu/changelog_tests/maintainer_name_leading_space Conflict in gitubuntu/changelog_tests/maintainer_name_trailing_space Conflict in gitubuntu/changelog_tests/test_date_1 Conflict in gitubuntu/changelog_tests/test_date_2 Conflict in gitubuntu/changelog_tests/test_distribution Conflict in gitubuntu/changelog_tests/test_distribution_source_1 Conflict in gitubuntu/changelog_tests/test_distribution_source_2 Conflict in gitubuntu/changelog_tests/test_distribution_source_3 Conflict in gitubuntu/changelog_tests/test_distribution_source_4 Conflict in gitubuntu/changelog_tests/test_maintainer_1 Conflict in gitubuntu/changelog_tests/test_maintainer_2 Conflict in gitubuntu/changelog_tests/test_maintainer_3 Conflict in gitubuntu/changelog_tests/test_versions_1 Conflict in gitubuntu/changelog_tests/test_versions_2 Conflict in gitubuntu/changelog_tests/test_versions_3 Conflict in gitubuntu/clone.py Conflict in gitubuntu/git_repository.py Conflict in gitubuntu/git_repository_test.py Conflict in gitubuntu/importer.py Conflict in gitubuntu/importer_service.py Conflict in gitubuntu/importer_service_broker.py Conflict in gitubuntu/importer_service_poller.py Conflict in gitubuntu/importer_service_poller_test.py Conflict in gitubuntu/importer_service_worker.py Conflict in gitubuntu/importer_test.py Conflict in gitubuntu/prepare_upload.py Conflict in gitubuntu/prepare_upload_test.py Conflict in gitubuntu/repo_builder.py Conflict in gitubuntu/scriptutils.py Conflict in gitubuntu/source-package-allowlist.txt Conflict in gitubuntu/source-package-blacklist.txt Conflict in 
gitubuntu/source-package-denylist.txt Conflict in gitubuntu/source-package-whitelist.txt Conflict in gitubuntu/source_builder.py Conflict in gitubuntu/source_information.py Conflict in gitubuntu/source_information_test.py Conflict in gitubuntu/submit.py Conflict in man/man1/git-ubuntu-clone.1 Conflict in man/man1/git-ubuntu-export-orig.1 Conflict in man/man1/git-ubuntu-import.1 Conflict in man/man1/git-ubuntu-merge.1 Conflict in man/man1/git-ubuntu-queue.1 Conflict in man/man1/git-ubuntu-remote.1 Conflict in man/man1/git-ubuntu-submit.1 Conflict in man/man1/git-ubuntu-tag.1 Conflict in man/man1/git-ubuntu.1 Conflict in setup.py |
||||||||
Related bugs: |
|
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Athos Ribeiro (community) | Approve | ||
Server Team CI bot | continuous-integration | Approve | |
git-ubuntu developers | Pending | ||
Review via email: mp+413881@code.launchpad.net |
This proposal has been superseded by a proposal from 2023-05-25.
Commit message
Make Jenkins happy
Description of the change
Server Team CI bot (server-team-bot) wrote : | # |
Athos Ribeiro (athos-ribeiro) wrote : | # |
LGTM!
At first I wondered if it would be a good idea to verify the contents of the "headers" dict in "cli_printargs", but then I realized it is already being done in "push" (even though that is being done through calls to "assert" and therefore we do rely on __debug__ being set to True).
Robie Basak (racb) wrote : | # |
> At first I wondered if it would be a good idea to verify the contents of the "headers" dict in "cli_printargs", but then I realized it is already being done in "push"
Right - it's more tedious to test from something closer to the CLI interface, so I tend to test the inner bits more directly.
> (even though that is being done through calls to "assert" and therefore we do rely on __debug__ being set to True).
I'm not sure we're on the same page here. What I mean above is that I'm testing the contents of the "headers" dict in prepare_
There are separate assert statements in the code itself in prepare_
Athos Ribeiro (athos-ribeiro) wrote : | # |
> I'm not sure we're on the same page here. What I mean above is that I'm testing the contents of the "headers" dict in prepare_
I was referring to the assert calls in `gitubuntu/
> # However, we don't know of any actual case when this might happen, so
# these are assertions rather than fully UX-compliant error paths.
> If there's something you spotted that you think isn't being tested from there, I'd like to add it!
The only thing that came to mind was the regular expression to parse the git URL. Although it is based on the one present in `https:/
This LGTM :)
- 1f99957... by Lena Voytek
-
Updates for inclusive naming
Edit filenames, variables, and comments to match inclusive naming
standards. The user experience will remain the same with this
update. However, when allowing and denying specific packages,
additions must be placed in source-package- allowlist. txt and
source-package- denylist. txt instead of source- package- whitelist. txt
and source-package- blacklist. txt. Signed-off-by: Lena Voytek <email address hidden>
- ef3b3a9... by Robie Basak
-
Merge remote-tracking branch 'lvoytek/master'
- 57e5776... by Robie Basak
-
Update maintainer email address
<email address hidden> no longer exists. Use
<email address hidden> instead. - 002e452... by Robie Basak
-
Update email address to request an import
This is for the message printed when a repository is not found on "git
ubuntu clone". Since <email address hidden> no longer exists, we'll use
<email address hidden> instead. - 2f0e855... by Robie Basak
-
Update email address default used in tests
Since <email address hidden> no longer exists, we'll use
<email address hidden> instead. This shouldn't affect
production code since the repo_builder and source_builder modules are
only used in tests. - 5aad111... by Robie Basak
-
Update default bot account name
usd-importer-bot is now renamed to git-ubuntu-bot as part of catching up
the project rename to git-ubuntu. - 5b0868f... by Sergio Durigan Junior
-
Accept ref names containing plus sign
Currently, if the ref name contains a plus sign, git ubuntu will fail
due to the following assertion error:

Traceback (most recent call last):
File "/snap/git-ubuntu/ 891/usr/ bin/git- ubuntu" , line 11, in <module>
load_entry_point( 'gitubuntu= =1.0', 'console_scripts', 'git-ubuntu')()
File "/snap/git-ubuntu/ 891/usr/ lib/python3/ dist-packages/ gitubuntu/ __main_ _.py", line 270, in main
sys.exit(args. func(args) )
File "/snap/git-ubuntu/ 891/usr/ lib/python3/ dist-packages/ gitubuntu/ prepare_ upload. py", line 170, in cli_printargs
headers = push(
File "/snap/git-ubuntu/ 891/usr/ lib/python3/ dist-packages/ gitubuntu/ prepare_ upload. py", line 118, in push
assert gitubuntu.importer. VCS_GIT_ REF_VALIDATION. fullmatch( ref.name)
AssertionError

However, branch names (which compose ref names) are allowed to contain
the plus sign. This commit expands the VCS_GIT_REF_VALIDATION regexp
to accept that. FWIW, I triggered this assertion when I named my branch after the
Debian release I was merging (for the net-snmp package):merge-
5.9.1+dfsg- 4-kinetic - f5dc43c... by Robie Basak
-
Clean up get_head_versions()
This method had no tests, and returning a pygit2.Branch object made it
harder to supply test data to other functions that accept data in the
structure returned by this method.In practice, callers only need the version, commit time and commit hash
of each branch head, so return only exactly this, and adjust all
callers. This should not change any behaviour.We also adjust and fill out the docstring.
A unit test will follow in a subsequent change. It can't be added here
without fixing a bug first. - b2f98c1... by Robie Basak
-
GitUbuntuSource
Information: dependency injection Add dependency injection to GitUbuntuSource
Information. This allows the
creation of this object in tests such that we can mock a
launchpadlib.launchpad. Launchpad object. - a11bfa8... by Robie Basak
-
launchpad_
versions_ published_ after: refactor call We don't need to set args and then call with **args, given that is all
we do with it. Instead, just call self.archive.getPublishedSou rces()
with keyword arguments directly. - 9addaa0... by Robie Basak
-
launchpad_
versions_ published_ after: drop return This return statement is redundant since it's at the end of the method
anyway. - 535dca8... by Robie Basak
-
Use date_created to determine head versions
It's incorrect to use date_published to determine head versions that
will be used to match against Launchpad publications, since we use
date_created at commit creation time. We should be using date_created
consistently instead. Not doing so means that we often (always?) fail to find a matching
Launchpad publication that is already imported and end up redundantly
reimporting everything from the beginning of time. This is terrible for
performance.More details on date_created vs. date_published here:
https://irclogs.ubuntu.com/2020/04/16/%23launchpad.html#t13:45

This change is difficult to test right here. Further refactoring follows
in subsequent changes and a test is added later. LP: #1979650
- 0b8cc0b... by Robie Basak
-
Rewrite launchpad_
versions_ published_ after The logic in this method can be simplified significantly with a rewrite.
To mitigate any regression, a parametrized unit test is added with the
expected behaviour thought out from the importer spec.There is still an inefficiency present here. In theory we could skip
importing pocket copies if the branch corresponding to a pocket is
up-to-date. However, currently the algorithm only matches against the
exact date_created attribute of the Launchpad publication object against
which a particular version was first imported. To ensure that branches
are updated if any new pocket copies have occurred, we must "replay"
them all through the importer. Therefore there is potential here for a
future performance improvement. - 284beb9... by Robie Basak
-
Add unit test for get_head_info()
Now that get_head_info() returns what we expect, we can add a unit test
for it now. - 2133870... by Robie Basak
-
Refactor _head_version_
is_equal Add a docstring, refactor the code to make it more readable, and rename
the method to match its definition better. This should not result in any functional change.
- 06f0eca... by Robie Basak
-
More project renames
This is a followup to commits 57e5776, 002e452, 2f0e855 and 5aad111 with
further cleanups around the project rename from usd-importer to
git-ubuntu and the move to the <email address hidden>
mailing list. Thanks to Bryce for spotting some of the remaining pieces. Where code is no longer used at all, or docs are completely out-of-date,
I've removed it instead of renaming the relevant bits. I've not worried
too much about fixing docs that I've touched if there's some value to
them staying, as that's a bit of a rabbit hole and I'd prefer to make
incremental progress. - 0e3ca5f... by Robie Basak
-
submit: default to ~canonical-
server- reporter On the Canonical Server Team, we have been using ~canonical-server in
its own review slot for the sole purpose of gathering all reviews we're
interested in tracking together on this team's +activereviews page.A problem with this is that we all belong to this team, so when a person
does a review, they sometimes accidentally "grab" that slot, so it
appears as their name rather than the team's, and thus disappears from
the report.One way around this is to use a separate team that none of us actually
belong to. This way we can't "grab" that slot.This changes the default team to this new ~canonical-
server- reporter
team to help those who use the "git ubuntu submit" command. - 4d497a7... by Robie Basak
-
Add comment on missing observability
This should help locate the older emailing code should it be needed in
the future. - c8216a7... by Robie Basak
-
Add test to accept refs that contain '+'
- ab3351a... by Robie Basak
-
Improve documentation on validation constants
This should hopefully do a better job of signposting anyone who wants to
change the constants to better understand the implications of doing so. - 7c524e6... by Robie Basak
-
importer: flip sense of push arguments
Instead of passing around an inverse boolean "no_push" argument, pass
the more natural "push" instead.This should make no functional change, but prepares us to invert the CLI
argument since after that future change passing around an inverse
boolean will make even less sense. - 7f3a586... by Robie Basak
-
importer: flip CLI argument default to not push
git ubuntu import has two uses: 1) it's run by the importer service
workers, which should push by default; 2) it's run by users, for whom it
doesn't generally make sense to push by default.Since it's easy for importer service workers to specify an option by
automation, we flip the default of the CLI to explicitly require --push
if you want to push. This makes it safer and easier to explain to users
how to use the import command locally.The importer service worker then adds --push unconditionally.
This makes the "implied no push" behaviour of certain options redundant,
so those are removed. - 0901740... by Robie Basak
-
scriptutils: remove pool_map_
import_ srcpkg( ) This function is no longer used from anywhere.
- 4a01d97... by Robie Basak
-
importer-
service- worker: add --no-push argument Asking the worker for --no-push enables a deeper dry run for performance
testing purposes. Unlike the behaviour of "git ubuntu import", "push
mode" is the default here because normally when one sets up the importer
service, it would be surprising behaviour not to do this, and this is
the only use case for this command. - ab5d7ad... by Robie Basak
-
Move import_srcpkg() to importer_
service_ worker. py This function is only used from here, so there's no need for it to be in
a different module. - 8681418... by Robie Basak
-
_main_with_repo: simplify if statement
Since the normal operation is to push, and straight after that we
"return 0", it's simpler to immediately "return 0" if we don't want to
push. This stops the usual code path being pushed "to the right", and
makes it easier to follow the logic.This should not result in a functional change.
- f9a91d9... by Robie Basak
-
Add changelog date override for gmsh
- 36eef3f... by Robie Basak
-
prepare-upload: add test for ssh:// URL rewrites
According to LP 1942985, this is another case where a rewrite is
expected. - 695a4d4... by Robie Basak
-
prepare-upload: handle ssh:// rewrites
According to LP 1942985, this is another case where a URL rewrite is
expected.
Unmerged commits
- 695a4d4... by Robie Basak
-
prepare-upload: handle ssh:// rewrites
According to LP 1942985, this is another case where a URL rewrite is
expected. - 36eef3f... by Robie Basak
-
prepare-upload: add test for ssh:// URL rewrites
According to LP 1942985, this is another case where a rewrite is
expected. - cc48a7f... by Robie Basak
-
prepare-upload: output invalid option on failure
If "git ubuntu prepare-upload args" fails for whatever reason, we don't
want dpkg-buildpackage or similar to proceed if invoked using
"dpkg-buildpackage $(git ubuntu prepare-upload args)" as this will
silently hide the error. Instead, we can output an invalid option
"--git-ubuntu- prepare- upload- args-failed" which should cause
dpkg-buildpackage to fail, and hopefully lead the user to find the cause
in stderr from the failure in our command. This change implements this new behaviour.
LP: #1942865
- d8750f1... by Robie Basak
-
prepare-upload: test for invalid option on failure
If "git ubuntu prepare-upload args" fails for whatever reason, we don't
want dpkg-buildpackage or similar to proceed if invoked using
"dpkg-buildpackage $(git ubuntu prepare-upload args)" as this will
silently hide the error. Instead, we can output an invalid option
"--git-ubuntu- prepare- upload- args-failed" which should cause
dpkg-buildpackage to fail, and hopefully lead the user to find the cause
in stderr from the failure in our command. This change adds the test for this behaviour, prior to implementation.
- 3c5c8c7... by Robie Basak
-
prepare-upload: rewrite LP git+ssh:// URLs
Automatically supply the corresponding https:// LP URL for the rich
history changes file headers if a git+ssh:// LP URL is used.LP: #1942985
- 76824a3... by Robie Basak
-
prepare-upload: refactor header data handling
Explicitly pull out the three header data items into their own named
variables to avoid confusion. - dfa0998... by Robie Basak
-
prepare-upload: add test for git+ssh:// rewrite
Identified in LP: #1942985: if a user has a git+ssh:// LP URL, we should
automatically rewrite it to the https:// one. This is the test for this, which is expected to fail because it isn't
fixed yet. - 8fb8e5a... by Robie Basak
-
Fix typos in test docstrings
- ab5d7ad... by Robie Basak
-
Move import_srcpkg() to importer_
service_ worker. py This function is only used from here, so there's no need for it to be in
a different module. - 4a01d97... by Robie Basak
-
importer-
service- worker: add --no-push argument Asking the worker for --no-push enables a deeper dry run for performance
testing purposes. Unlike the behaviour of "git ubuntu import", "push
mode" is the default here because normally when one sets up the importer
service, it would be surprising behaviour not to do this, and this is
the only use case for this command.
Preview Diff
1 | diff --git a/doc/README.md b/doc/README.md | |||
2 | 0 | new file mode 100644 | 0 | new file mode 100644 |
3 | index 0000000..ff10689 | |||
4 | --- /dev/null | |||
5 | +++ b/doc/README.md | |||
6 | @@ -0,0 +1,117 @@ | |||
7 | 1 | <<<<<<< doc/README.md | ||
8 | 2 | ======= | ||
9 | 3 | ## Running the git-ubuntu importer ## | ||
10 | 4 | This just covers how to run [`git ubuntu import`](https://code.launchpad.net/git-ubuntu). | ||
11 | 5 | |||
12 | 6 | ## Getting via snap ## | ||
13 | 7 | The preferred installation method is to install via snap: | ||
14 | 8 | |||
15 | 9 | 1. install the snap | ||
16 | 10 | |||
17 | 11 | $ snap install --classic git-ubuntu | ||
18 | 12 | |||
19 | 13 | ## [Alternate:] Getting via git ## | ||
20 | 14 | Less well tested, but in theory this should work as well. | ||
21 | 15 | |||
22 | 16 | 1. Get `git-ubuntu` from git | ||
23 | 17 | |||
24 | 18 | $ git clone git://git.launchpad.net/git-ubuntu git-ubuntu | ||
25 | 19 | or | ||
26 | 20 | |||
27 | 21 | $ git clone https://git.launchpad.net/git-ubuntu git-ubuntu | ||
28 | 22 | or | ||
29 | 23 | |||
30 | 24 | $ git clone ssh://git.launchpad.net/git-ubuntu git-ubuntu | ||
31 | 25 | |||
32 | 26 | |||
33 | 27 | 2. Put it in your PATH | ||
34 | 28 | |||
35 | 29 | $ PATH="$PWD/git-ubuntu/bin:$PATH" | ||
36 | 30 | |||
37 | 31 | 3. Get necessary dependencies | ||
38 | 32 | |||
39 | 33 | $ sudo apt update -qy | ||
40 | 34 | |||
41 | 35 | $ deps="dpkg-dev git-buildpackage python3-argcomplete \ | ||
42 | 36 | python3-lazr.restfulclient python3-debian python3-distro-info \ | ||
43 | 37 | python3-launchpadlib python3-pygit2 python3-ubuntutools \ | ||
44 | 38 | python3-cachetools python3-pkg-resources python3-pytest \ | ||
45 | 39 | python3-petname quilt" | ||
46 | 40 | |||
47 | 41 | $ sudo apt install -qy ${deps} | ||
48 | 42 | |||
49 | 43 | |||
50 | 44 | ## Running ## | ||
51 | 45 | |||
52 | 46 | * For local usage | ||
53 | 47 | |||
54 | 48 | `git ubuntu import` will push to launchpad git by default. If you just want to get a git repo locally of a given package, then: | ||
55 | 49 | |||
56 | 50 | $ mkdir ${HOME}/Imports | ||
57 | 51 | $ PKG=uvtool | ||
58 | 52 | $ git ubuntu import -v --no-push --directory=${HOME}/Imports/$PKG $PKG | ||
59 | 53 | |||
60 | 54 | * As member of [git-ubuntu-import](https://launchpad.net/~git-ubuntu-import) for official publishing. | ||
61 | 55 | |||
62 | 56 | $ PKG=uvtool | ||
63 | 57 | $ LP_USER=smoser # your launchpad user name if different from $USER | ||
64 | 58 | $ git ubuntu import -v --directory=$PKG --lp-user=$LP_USER $PKG | ||
65 | 59 | |||
66 | 60 | ## Shell completion (bash) ## | ||
67 | 61 | |||
68 | 62 | * `git-ubuntu` will autocomplete by default if global argcomplete has | ||
69 | 63 | been enabled | ||
70 | 64 | (https://github.com/kislyuk/argcomplete#activating-global-completion) | ||
71 | 65 | or specify | ||
72 | 66 | |||
73 | 67 | eval "$(register-python-argcomplete git-ubuntu)" | ||
74 | 68 | |||
75 | 69 | The snap version does this by default. | ||
76 | 70 | |||
77 | 71 | * `git ubuntu` autocompletion is a little more challenging. To enable | ||
78 | 72 | it, add the following to your .bashrc or similar: | ||
79 | 73 | |||
80 | 74 | if [ -f /path/to/git-ubuntu/doc/gitubuntu-completion.sh ]; then | ||
81 | 75 | . /path/to/git-ubuntu/doc/gitubuntu-completion.sh | ||
82 | 76 | fi | ||
83 | 77 | |||
84 | 78 | For the snap version, this would look like: | ||
85 | 79 | |||
86 | 80 | if [ -f /snap/git-ubuntu/current/doc/gitubuntu-completion.sh ]; then | ||
87 | 81 | . /snap/git-ubuntu/current/doc/gitubuntu-completion.sh | ||
88 | 82 | fi | ||
89 | 83 | |||
90 | 84 | ## View Output ## | ||
91 | 85 | If you did a local checkout with `--directory=./$PKG` then you'll have a git repository in `./$PKG/git`. | ||
92 | 86 | |||
93 | 87 | $ cd $PKG | ||
94 | 88 | $ git branch | ||
95 | 89 | ubuntu/saucy | ||
96 | 90 | ubuntu/saucy-proposed | ||
97 | 91 | ubuntu/trusty | ||
98 | 92 | ubuntu/trusty-proposed | ||
99 | 93 | ubuntu/utopic | ||
100 | 94 | ubuntu/utopic-proposed | ||
101 | 95 | ubuntu/vivid | ||
102 | 96 | ubuntu/vivid-proposed | ||
103 | 97 | ubuntu/wily | ||
104 | 98 | ubuntu/xenial | ||
105 | 99 | ubuntu/yakkety | ||
106 | 100 | |||
107 | 101 | If you did `--lp-owner=git-ubuntu-import`, then your repo should be | ||
108 | 102 | listed in web view at [https://code.launchpad.net/~git-ubuntu-import/+git]. And it should be able to be cloned with: | ||
109 | 103 | |||
110 | 104 | $ git clone https://git.launchpad.net/~git-ubuntu-import/ubuntu/+source/$PKG | ||
111 | 105 | or | ||
112 | 106 | |||
113 | 107 | $ git clone lp:~git-ubuntu-import/ubuntu/+source/$PKG | ||
114 | 108 | or | ||
115 | 109 | |||
116 | 110 | $ git ubuntu clone $PKG | ||
117 | 111 | |||
118 | 112 | |||
119 | 113 | ## Links ## | ||
120 | 114 | * [GitWorkflow Wiki page](https://wiki.ubuntu.com/UbuntuDevelopment/Merging/GitWorkflow) | ||
121 | 115 | * [Launchpad git for git-ubuntu](https://code.launchpad.net/git-ubuntu) | ||
122 | 116 | * [Git view of git-ubuntu](https://git.launchpad.net/git-ubuntu) | ||
123 | 117 | >>>>>>> doc/README.md | ||
124 | diff --git a/doc/SPECIFICATION b/doc/SPECIFICATION | |||
125 | 0 | new file mode 100644 | 118 | new file mode 100644 |
126 | index 0000000..21cd184 | |||
127 | --- /dev/null | |||
128 | +++ b/doc/SPECIFICATION | |||
129 | @@ -0,0 +1,167 @@ | |||
130 | 1 | <<<<<<< doc/SPECIFICATION | ||
131 | 2 | ======= | ||
132 | 3 | Specification | ||
133 | 4 | |||
134 | 5 | git URL shortcuts used (add these to ~/.gitconfig or expand them | ||
135 | 6 | manually yourself): | ||
136 | 7 | |||
137 | 8 | [url "ssh://<LPID>@git.launchpad.net/~<LPID>/ubuntu/+source/"] | ||
138 | 9 | insteadof = lpmep: | ||
139 | 10 | |||
140 | 11 | Definitions: "old debian", "old ubuntu", "new debian", "new ubuntu" are | ||
141 | 12 | as understood. Make sure that "old debian" is really the last common | ||
142 | 13 | ancestor of "old ubuntu" and "new debian". Determining this is | ||
143 | 14 | especially prone to error if Ubuntu imported new upstream versions since | ||
144 | 15 | it diverged from Debian. If this is wrong, then pain will result. | ||
145 | 16 | |||
146 | 17 | By "merge" we always mean an "Ubuntu merge", which is in git terms | ||
147 | 18 | really a rebase. No actual git merge takes place in this entire | ||
148 | 19 | workflow. | ||
149 | 20 | |||
150 | 21 | No trees in this workflow ever have quilt patches applied. All commits | ||
151 | 22 | are with quilt fully popped and no .pc directory. Changes to quilt | ||
152 | 23 | patches are seen in debian/patches/* only. | ||
153 | 24 | |||
154 | 25 | Common git references expected (T for tag, B for branch): | ||
155 | 26 | |||
156 | 27 | Things that will be imported by a sponsor or the importer (available | ||
157 | 28 | from: lpusip:<package>; ask a sponsor if missing): | ||
158 | 29 | |||
159 | 30 | * T import/<version> and T upload/<version> | ||
160 | 31 | * Logically this is the tree corresponding to a particular tag; | ||
161 | 32 | history is secondary. | ||
162 | 33 | * The tree is identical to corresponding source package version in the | ||
163 | 34 | archive. | ||
164 | 35 | * For T import/<version>: imported from the archive and pushed to | ||
165 | 36 | ~git-ubuntu-import as an authoritative source. | ||
166 | 37 | * For T upload/<version>: pushed to ~ubuntu-server-dev by an uploader | ||
167 | 38 | to record exactly what was uploaded. | ||
168 | 39 | * Pushing to ~ubuntu-server-dev is restricted to uploaders. | ||
169 | 40 | * The parent commit should be the previous version import or upload | ||
170 | 41 | tag where available. An orphan commit is acceptable in the | ||
171 | 42 | exceptional case that this is not possible. | ||
172 | 43 | |||
173 | 44 | * B ubuntu/devel | ||
174 | 45 | * Logically this is our moving reference for what is currently in the | ||
175 | 46 | Ubuntu development release. | ||
176 | 47 | * In ~ubuntu-server-dev, this must always point to something also | ||
177 | 48 | tagged as import/<version> or upload/<version>. | ||
178 | 49 | * Pushing to ~ubuntu-server-dev is restricted to uploaders. | ||
179 | 50 | * This branch will be rebased to new Debian imports during Ubuntu | ||
180 | 51 | "merges" (but tags will be left behind). | ||
181 | 52 | |||
182 | 53 | Things that should be made available to a sponsor when submitting a | ||
183 | 54 | merge for upload (push to: lpmep:<package>): | ||
184 | 55 | |||
185 | 56 | * T logical/<old ubuntu> | ||
186 | 57 | * Logically, this is a patchset | ||
187 | 58 | ({import,upload}/<old debian>..logical/<old ubuntu>). | ||
188 | 59 | * Breakdown of previous Ubuntu delta. | ||
189 | 60 | * Must be based on an official import/<old debian> or upload/<old debian> | ||
190 | 61 | tag ("official" means from ~ubuntu-server-dev). | ||
191 | 62 | * One commit per logical change over the entire Ubuntu delta. | ||
192 | 63 | * Churn squashed. | ||
193 | 64 | * No upstream changes (so only changes in debian/*). | ||
194 | 65 | * No changes to debian/changelog. | ||
195 | 66 | * No "update-maintainer" or "Vcs-*" or other meta changes. | ||
196 | 67 | * To get to this, you will probably start from reconstruct/<old ubuntu>, | ||
197 | 68 | described below. | ||
198 | 69 | * Coherence checks: | ||
199 | 70 | - Identical to the corresponding import/<version> except for: | ||
200 | 71 | + Meta changes (update-maintainer, Vcs-*) in debian/control. | ||
201 | 72 | + Anything not in debian/*, which should be unchanged | ||
202 | 73 | (exceptionally this happens when new upstream versions were | ||
203 | 74 | imported ahead of Debian). | ||
204 | 75 | + debian/changelog, which should be unchanged. | ||
205 | 76 | - No line should be touched twice, except where separate logical | ||
206 | 77 | changes need to touch the same line. | ||
207 | 78 | * Providing this makes it easy for the sponsor to check a proposed | ||
208 | 79 | merge: | ||
209 | 80 | 1. Check correctness of this tag against the previous Ubuntu delta | ||
210 | 81 | (perform the above checks and use "git log -p" to make | ||
211 | 82 | sure each logical commit describes only its own changes). | ||
212 | 83 | 2. Ensure that every commit here is accounted for in the proposed | ||
213 | 84 | merge. | ||
214 | 85 | |||
215 | 86 | * B merge | ||
216 | 87 | * Proposed merge for upload. | ||
217 | 88 | * Based on import/<new debian> or upload/<new debian>. | ||
218 | 89 | * One commit per logical change; no changes to debian/changelog in | ||
219 | 90 | those commits. | ||
220 | 91 | * One commit for each of merge-changelogs, reconstruct-changelog, any | ||
221 | 92 | changelog tweaks and ubuntu-meta (or update-maintainer as you wish). | ||
222 | 93 | * debian/changelog should be "released" with the version string | ||
223 | 94 | matching the proposed upload version and targeting the correct | ||
224 | 95 | pocket. | ||
225 | 96 | * Add commits to the end of this branch in response to reviewer | ||
226 | 97 | comments. | ||
227 | 98 | * If agreed with your sponsor that for the changes requested a new | ||
228 | 99 | rebased merge branch will be easier to manage than adding commits to | ||
229 | 100 | the end, then do this instead. Rebase the original "merge" branch. | ||
230 | 101 | To keep history, if you wish tag the old one "merge.v1". You may | ||
231 | 102 | also rebase like this as you wish during preparation before | ||
232 | 103 | presenting this branch for review. | ||
233 | 104 | |||
234 | 105 | Things you may want to make available to reviewers so that they can | ||
235 | 106 | check your process (push to: lpmep:<package>), for which we have | ||
236 | 107 | standardised names: | ||
237 | 108 | |||
238 | 109 | * T reconstruct/<old ubuntu> | ||
239 | 110 | * Logically, this is a patchset | ||
240 | 111 | ({import,upload}/<old debian>..reconstruct/<old ubuntu>). | ||
241 | 112 | * Based on import/<old debian>. For each Ubuntu upload since then: | ||
242 | 113 | * One commit to pull in a new upstream if there is one (rare). This | ||
243 | 114 | must not contain any changes to debian/. | ||
244 | 115 | * One commit per logical change. | ||
245 | 116 | * One commit for changelog. | ||
246 | 117 | * One commit for any ubuntu-meta/update-maintainer change (usually | ||
247 | 118 | only in merge uploads). | ||
248 | 119 | * Drop non-logical commits from this tip and rebase to squash and | ||
249 | 120 | split to derive the logical/<old ubuntu> tag. | ||
250 | 121 | |||
251 | 122 | * T merge.v1, merge.v2, etc. | ||
252 | 123 | * The old state of each merge branch before you rebased it. Only | ||
253 | 124 | useful if you rebased during your merge. If done after your initial | ||
254 | 125 | review request, please only do this with agreement of your sponsor, | ||
255 | 126 | since it causes your sponsor more review time. | ||
256 | 127 | |||
257 | 128 | Merge proposal to make in Launchpad: | ||
258 | 129 | |||
259 | 130 | lpmep:<package> merge → lpusdp:<package> ubuntu/devel | ||
260 | 131 | |||
261 | 132 | After review: | ||
262 | 133 | |||
263 | 134 | If adding commits in response to reviewer comments, just push again to | ||
264 | 135 | lpmep:<package> merge. | ||
265 | 136 | |||
266 | 137 | If (exceptionally) rebasing in response to reviewer comments: | ||
267 | 138 | 1. Tag the old branch "merge.v1" (or v2, v3 etc. for future iterations) | ||
268 | 139 | 2. Rebase the "merge" branch as required | ||
269 | 140 | 3. Push to lpmep:<package>: | ||
270 | 141 | a) The new "v" tag from above. | ||
271 | 142 | b) The merge branch (force will be required). | ||
272 | 143 | |||
273 | 144 | For "traditional" sponsors: | ||
274 | 145 | |||
275 | 146 | git can easily generate the traditional debdiffs that you normally | ||
276 | 147 | review. Assuming you have appropriate remote tracking branches: | ||
277 | 148 | |||
278 | 149 | * For Ubuntu → Ubuntu, "git diff lpusdp/ubuntu/devel sponsoree/merge" | ||
279 | 150 | * For Debian → Ubuntu, "git diff lpusdp/debian/sid sponsoree/merge" | ||
280 | 151 | |||
281 | 152 | Or you can ask the sponsoree to generate these for you. | ||
282 | 153 | |||
283 | 154 | To upload a reviewed merge (for the sponsor): | ||
284 | 155 | |||
285 | 156 | (Sponsors: you can just ignore these instructions and upload the | ||
286 | 157 | traditional way if you like. But sponsorees cannot push to our VCS and | ||
287 | 158 | you can, so it would be nice if you could push this please, so a future | ||
288 | 159 | merger doesn't have to reconstruct the lost information). | ||
289 | 160 | |||
290 | 161 | 1. Upload using dput as usual. | ||
291 | 162 | 2. Tag the merge branch "upload/<version>" (replace ':' and '~' with '_' | ||
292 | 163 | to meet git's naming requirements). A lightweight tag is fine, or | ||
293 | 164 | go ahead and annotate if you want to include any extra notes. | ||
294 | 165 | 3. Force push the merge branch to lpusdp:<package> ubuntu/devel. | ||
295 | 166 | 4. Push the "upload/<version>" tag to lpusdp:<package>. | ||
296 | 167 | >>>>>>> doc/SPECIFICATION | ||
297 | diff --git a/doc/release-process.md b/doc/release-process.md | |||
298 | 0 | new file mode 100644 | 168 | new file mode 100644 |
299 | index 0000000..524736e | |||
300 | --- /dev/null | |||
301 | +++ b/doc/release-process.md | |||
302 | @@ -0,0 +1,264 @@ | |||
303 | 1 | <<<<<<< doc/release-process.md | ||
304 | 2 | ======= | ||
305 | 3 | Release Process | ||
306 | 4 | =============== | ||
307 | 5 | |||
308 | 6 | 1. Set the new version number | ||
309 | 7 | ------------------------------ | ||
310 | 8 | |||
311 | 9 | See gitubuntu/version.py for the current version number. | ||
312 | 10 | |||
313 | 11 | $ export LAST_RELEASE=$(cat gitubuntu/version.py | cut -d\' -f2) | ||
314 | 12 | $ echo "${LAST_RELEASE}" | ||
315 | 13 | |||
316 | 14 | Git Ubuntu's version numbers follow the common MAJOR.MINOR.PATCH and | ||
317 | 15 | MAJOR.MINOR.PATCH-rcN patterns, where for this project these are | ||
318 | 16 | interpreted as follows: | ||
319 | 17 | |||
320 | 18 | - MAJOR is updated for API breaking changes such as alterations in | ||
321 | 19 | importer hash ABI stability. As a special rule, MAJOR=0 indicates | ||
322 | 20 | no stability guarantees. Notably, changes in MAJOR version are not | ||
323 | 21 | guaranteed to be forward or backward compatible with earlier MAJOR | ||
324 | 22 | versions. | ||
325 | 23 | |||
326 | 24 | - MINOR is incremented for feature-level changes that may alter how | ||
327 | 25 | the git ubuntu frontends behave, including breaking changes in how | ||
328 | 26 | git ubuntu subcommands and their parameters work. The importer API, | ||
329 | 27 | however, is intended to be backward compatible from one MINOR | ||
330 | 28 | version to the next, with no breaking changes. | ||
331 | 29 | |||
332 | 30 | - PATCH is incremented for bug fixes and routine feature additions | ||
333 | 31 | that introduce no compatibility issues for either the backend | ||
334 | 32 | importer or the frontend client. In particular, new commands and | ||
335 | 33 | parameters may be introduced, but existing ones will not be changed | ||
336 | 34 | or removed. | ||
337 | 35 | |||
338 | 36 | - rcN indicates a release candidate, using a sequential numbering for | ||
339 | 37 | 'N'. | ||
340 | 38 | |||
341 | 39 | Define the new version for the release: | ||
342 | 40 | |||
343 | 41 | $ export VERSION="<MAJOR>.<MINOR>.<PATCH>" | ||
344 | 42 | |||
345 | 43 | Or, for a release candidate: | ||
346 | 44 | |||
347 | 45 | $ export VERSION="<MAJOR>.<MINOR>.<PATCH>-rcN" | ||
348 | 46 | |||
349 | 47 | Set it in the git repo: | ||
350 | 48 | |||
351 | 49 | $ git checkout -b ${VERSION}-release | ||
352 | 50 | $ echo "VERSION = '${VERSION}'" > gitubuntu/version.py | ||
353 | 51 | $ git commit gitubuntu/version.py -m "version: bump to ${VERSION}" | ||
354 | 52 | $ git tag --annotate -m "${VERSION} Release" ${VERSION} | ||
355 | 53 | |||
356 | 54 | The annotated tag is necessary, because the snap build mechanisms | ||
357 | 55 | determine the version to set in the snap based on it. | ||
358 | 56 | |||
359 | 57 | |||
360 | 58 | 2. Draft release announcement | ||
361 | 59 | ------------------------------ | ||
362 | 60 | |||
363 | 61 | The release announcement generally summarizes the major changes in the | ||
364 | 62 | release, and (where possible) identifies the bug fixes included in it. | ||
365 | 63 | Some examples of past release announcements: | ||
366 | 64 | |||
367 | 65 | - 0.2.1: https://lists.ubuntu.com/archives/ubuntu-server/2017-September/007594.html | ||
368 | 66 | - 0.3.0: https://lists.ubuntu.com/archives/ubuntu-server/2017-October/007598.html | ||
369 | 67 | - 0.4.0: https://lists.ubuntu.com/archives/ubuntu-server/2017-October/007605.html | ||
370 | 68 | - 0.7.1: https://lists.ubuntu.com/archives/ubuntu-server/2018-March/007667.html | ||
371 | 69 | |||
372 | 70 | The git log can be referred to for changes worth mentioning: | ||
373 | 71 | |||
374 | 72 | $ git log --stat ${LAST_RELEASE}.. | ||
375 | 73 | |||
376 | 74 | If desired, a shortlog can be appended to the release announcement, to | ||
377 | 75 | itemize all changes: | ||
378 | 76 | |||
379 | 77 | $ git shortlog ${LAST_RELEASE}... | ||
380 | 78 | |||
381 | 79 | |||
382 | 80 | 3. Testing | ||
383 | 81 | ----------- | ||
384 | 82 | |||
385 | 83 | First check there are no unexpected test failures in trunk: | ||
386 | 84 | |||
387 | 85 | $ python3 ./setup.py check | ||
388 | 86 | $ python3 ./setup.py build | ||
389 | 87 | $ pytest-3 . | ||
390 | 88 | |||
391 | 89 | Optionally, the full test suite can be directly executed, although since | ||
392 | 90 | it has some rather exacting dependencies, it may not be able to build | ||
393 | 91 | properly. | ||
394 | 92 | |||
395 | 93 | $ python3 ./setup.py test | ||
396 | 94 | |||
397 | 95 | Next, push a copy of the branch up to launchpad under your own namespace | ||
398 | 96 | for Continuous Integration (CI) testing: | ||
399 | 97 | |||
400 | 98 | $ git push ${LP_USERNAME} ${VERSION}-release | ||
401 | 99 | |||
402 | 100 | Go to the Launchpad page for the branch and create a merge proposal | ||
403 | 101 | targeted to lp:git-ubuntu, set 'main' as the Target branch and set | ||
404 | 102 | the Description to say "For CI build only". Review type can be set to | ||
405 | 103 | 'ci'. This will ensure the regular CI runs on it, which exercises the | ||
406 | 104 | snap build mechanics, but lets the development team know it can be | ||
407 | 105 | ignored for review purposes. This isn't the snap we'll actually be | ||
408 | 106 | using, but will produce one we can download and inspect. | ||
409 | 107 | |||
410 | 108 | A snap candidate (not yet uploaded to the store) can be installed | ||
411 | 109 | locally for testing like this: | ||
412 | 110 | |||
413 | 111 | $ lxc exec ${CONTAINER} -- rm /tmp/git-ubuntu_0+git.*_amd64.snap | ||
414 | 112 | $ lxc file push ./git-ubuntu_0+git.*_amd64.snap ${CONTAINER}/tmp | ||
415 | 113 | $ lxc exec ${CONTAINER} -- bash | ||
416 | 114 | $ sudo snap install --classic --dangerous /tmp/git-ubuntu_0+git.*_amd64.snap | ||
417 | 115 | |||
418 | 116 | The snap package itself can be locally mounted directly as a filesystem, | ||
419 | 117 | which can be helpful for evaluating its contents. For example, to look | ||
420 | 118 | at what Python modules are included: | ||
421 | 119 | |||
422 | 120 | $ mkdir /tmp/snap | ||
423 | 121 | $ sudo mount git-ubuntu_0+git.59a1e51_amd64.snap /tmp/snap/ | ||
424 | 122 | $ ls /tmp/snap/usr/lib/python3.6/ | ||
425 | 123 | $ cd ${HOME} && sudo umount /tmp/snap && rmdir /tmp/snap | ||
426 | 124 | |||
427 | 125 | |||
428 | 126 | 4. Release the new version | ||
429 | 127 | --------------------------- | ||
430 | 128 | |||
431 | 129 | Once everything looks good, merge the change from your local release branch to master: | ||
432 | 130 | |||
433 | 131 | $ git checkout master | ||
434 | 132 | $ git merge --ff-only ${VERSION}-release | ||
435 | 133 | |||
436 | 134 | Make sure everything looks ok. The status should show no uncommitted | ||
437 | 135 | changes, etc. Verify the log shows the correct tags and that HEAD | ||
438 | 136 | points to master, etc. Double-check that git describe displays | ||
439 | 137 | ${VERSION}: | ||
440 | 138 | |||
441 | 139 | $ git status | ||
442 | 140 | $ git log --oneline --decorate=short | ||
443 | 141 | $ git describe | ||
444 | 142 | |||
445 | 143 | If all looks good, now push the annotated tag and code changes to origin: | ||
446 | 144 | |||
447 | 145 | $ git push origin master ${VERSION} | ||
448 | 146 | |||
449 | 147 | |||
450 | 148 | 5. Publish Snap | ||
451 | 149 | --------------- | ||
452 | 150 | |||
453 | 151 | Channels used for delivering the snap package are defined as follows: | ||
454 | 152 | |||
455 | 153 | - EDGE: Tracks the latest code in master to allow testing of | ||
456 | 154 | potentially unstable work. This is not recommended for general | ||
457 | 155 | usage by end users. | ||
458 | 156 | |||
459 | 157 | - BETA: Most of the time, this channel will track the same version as | ||
460 | 158 | in STABLE, but also delivers release candidates and sometimes may | ||
461 | 159 | provide early access to new features or bug fixes. This channel is | ||
462 | 160 | recommended particularly for advanced git-ubuntu users who wish to | ||
463 | 161 | participate in testing activities. It is also the channel used for | ||
464 | 162 | the importer on the server. | ||
465 | 163 | |||
466 | 164 | - STABLE: This channel tracks the current release used in the | ||
467 | 165 | git-ubuntu service itself. This is the recommended channel for | ||
468 | 166 | all end users. | ||
469 | 167 | |||
470 | 168 | You will initially publish the package to EDGE only to verify it builds | ||
471 | 169 | properly. | ||
472 | 170 | |||
473 | 171 | First, trigger a rebuild of the snap in the server team's Jenkins | ||
474 | 172 | instance. The git push from step #4 will get picked up by the nightly | ||
475 | 173 | builder, but if you don't wish to wait a day for the build, you can | ||
476 | 174 | manually trigger it on this page: | ||
477 | 175 | |||
478 | 176 | https://jenkins.ubuntu.com/server/job/git-ubuntu-ci-nightly/ | ||
479 | 177 | |||
480 | 178 | Make sure you're logged into Jenkins, then click | ||
481 | 179 | |||
482 | 180 | "Build Now" | ||
483 | 181 | |||
484 | 182 | Once this is done, download the snap from Jenkins. It should be listed | ||
485 | 183 | under Last Successful Artifacts on this page: | ||
486 | 184 | |||
487 | 185 | https://jenkins.ubuntu.com/server/job/git-ubuntu-ci-nightly/ | ||
488 | 186 | |||
489 | 187 | Next, verify you have your snapcraft account configured, logged in, and | ||
490 | 188 | working locally: | ||
491 | 189 | |||
492 | 190 | $ snapcraft whoami | ||
493 | 191 | $ snap list | ||
494 | 192 | |||
495 | 193 | Finally, upload the snap to EDGE: | ||
496 | 194 | |||
497 | 195 | $ snapcraft push --release edge ./git-ubuntu_${VERSION}+git<whatever>_amd64.snap | ||
498 | 196 | |||
499 | 197 | The command will block for a few minutes while the store analyzes the | ||
500 | 198 | snap. Once it is approved, it will become available in the edge channel. | ||
501 | 199 | |||
502 | 200 | For anything but trivial releases, you should then `snap install` the | ||
503 | 201 | edge version of the package in a test environment to verify it. | ||
504 | 202 | |||
505 | 203 | Once you deem it good to go, use the Snapcraft website | ||
506 | 204 | (https://snapcraft.io/git-ubuntu/releases) to copy the snap to BETA, and | ||
507 | 205 | proceed with installing it in production (next step). Solicit broader | ||
508 | 206 | testing, as appropriate, and then after a sufficient amount of testing | ||
509 | 207 | time (e.g. a week or so) copy the snap to STABLE. | ||
510 | 208 | |||
511 | 209 | |||
512 | 210 | 6. Installation to Production | ||
513 | 211 | ----------------------------- | ||
514 | 212 | |||
515 | 213 | See our internal process documentation for details on how to do this. | ||
516 | 214 | |||
517 | 215 | |||
518 | 216 | 7. Announce Release | ||
519 | 217 | ------------------- | ||
520 | 218 | |||
521 | 219 | Email the (gpg signed) announcement to: | ||
522 | 220 | |||
523 | 221 | To: ubuntu-devel@lists.ubuntu.com | ||
524 | 222 | Cc: ubuntu-distributed-devel@lists.ubuntu.com | ||
525 | 223 | |||
526 | 224 | Upload a copy of the announcement to https://launchpad.net/git-ubuntu/ | ||
527 | 225 | |||
528 | 226 | |||
529 | 227 | 8. Close bugs | ||
530 | 228 | ------------- | ||
531 | 229 | |||
532 | 230 | Close all bugs fixed by this release. Here's an example that can be run | ||
533 | 231 | from `lp-shell` to close all bug tasks marked "Fix Committed". If you | ||
534 | 232 | use this, remember to change `VERSION` appropriately: | ||
535 | 233 | |||
536 | 234 | VERSION = '1.0' | ||
537 | 235 | tasks = list(lp.projects['git-ubuntu'].searchTasks(status='Fix Committed')) | ||
538 | 236 | bugs = [lp.load(bug_link) for bug_link in set(task.bug_link for task in tasks)] | ||
539 | 237 | for bug in bugs: | ||
540 | 238 | bug.newMessage( | ||
541 | 239 | subject=f'Fix released in git-ubuntu', | ||
542 | 240 | content=f'Fix released in git-ubuntu version {VERSION}', | ||
543 | 241 | ) | ||
544 | 242 | for task in tasks: | ||
545 | 243 | task.status = 'Fix Released' | ||
546 | 244 | task.lp_save() | ||
547 | 245 | |||
548 | 246 | |||
549 | 247 | 9. Update Trello Card | ||
550 | 248 | --------------------- | ||
551 | 249 | |||
552 | 250 | If a card hasn't been created in the daily-ubuntu-server board for the | ||
553 | 251 | release task already, add one at this point. Add yourself as a member | ||
554 | 252 | of the card, and add labels 'git-ubuntu' and 'highlight'. The latter | ||
555 | 253 | label flags it to be mentioned in the week's Ubuntu Server Developer Summary. | ||
556 | 254 | |||
557 | 255 | |||
558 | 256 | 10. Discourse Blogging (Optional) | ||
559 | 257 | -------------------------------- | ||
560 | 258 | |||
561 | 259 | If desired, follow up with one or more topics/posts to | ||
562 | 260 | discourse.ubuntu.com about the major new features included in the | ||
563 | 261 | release. Discourse posts shouldn't be done just for ordinary bug | ||
564 | 262 | fixing, and shouldn't simply mirror the release announcement or usage | ||
565 | 263 | documentation. | ||
566 | 264 | >>>>>>> doc/release-process.md | ||
567 | diff --git a/gitubuntu/changelog_date_overrides.txt b/gitubuntu/changelog_date_overrides.txt | |||
568 | 0 | new file mode 100644 | 265 | new file mode 100644 |
569 | index 0000000..cdf5e30 | |||
570 | --- /dev/null | |||
571 | +++ b/gitubuntu/changelog_date_overrides.txt | |||
572 | @@ -0,0 +1,19 @@ | |||
573 | 1 | <<<<<<< gitubuntu/changelog_date_overrides.txt | ||
574 | 2 | ======= | ||
575 | 3 | # Package versions that have illegal dates in their changelog entries. | ||
576 | 4 | # In these cases the first seen publication date must be used instead | ||
577 | 5 | # for the author date of a synthesized commit. | ||
578 | 6 | # | ||
579 | 7 | # Note: this file must exactly match the import specification. Before | ||
580 | 8 | # adding an entry here, adjust the specification first. | ||
581 | 9 | |||
582 | 10 | ghostscript 9.50~dfsg-5ubuntu4 | ||
583 | 11 | gmsh 2.0.7-1.2ubuntu1 | ||
584 | 12 | iscsitarget 0.4.15+svn148-2.1ubuntu1 | ||
585 | 13 | lxqt-config 0.13.0-0ubuntu4 | ||
586 | 14 | mail-spf-perl 2.004-0ubuntu1 | ||
587 | 15 | nut 2.2.0-2 | ||
588 | 16 | prips 0.9.4-3 | ||
589 | 17 | prometheus-alertmanager 0.15.3+ds-3ubuntu1 | ||
590 | 18 | software-properties 0.80 | ||
591 | 19 | >>>>>>> gitubuntu/changelog_date_overrides.txt | ||
592 | diff --git a/gitubuntu/changelog_tests/maintainer_name_inner_space b/gitubuntu/changelog_tests/maintainer_name_inner_space | |||
593 | 0 | new file mode 100644 | 20 | new file mode 100644 |
594 | index 0000000..17cc421 | |||
595 | --- /dev/null | |||
596 | +++ b/gitubuntu/changelog_tests/maintainer_name_inner_space | |||
597 | @@ -0,0 +1,8 @@ | |||
598 | 1 | <<<<<<< gitubuntu/changelog_tests/maintainer_name_inner_space | ||
599 | 2 | ======= | ||
600 | 3 | testpkg (1.0) xenial; urgency=medium | ||
601 | 4 | |||
602 | 5 | * Sample entry. | ||
603 | 6 | |||
604 | 7 | -- Test Maintainer <test-maintainer@example.com> Thu, 01 Jan 1970 00:00:00 +0000 | ||
605 | 8 | >>>>>>> gitubuntu/changelog_tests/maintainer_name_inner_space | ||
606 | diff --git a/gitubuntu/changelog_tests/maintainer_name_leading_space b/gitubuntu/changelog_tests/maintainer_name_leading_space | |||
607 | 0 | new file mode 100644 | 9 | new file mode 100644 |
608 | index 0000000..07e9dd8 | |||
609 | --- /dev/null | |||
610 | +++ b/gitubuntu/changelog_tests/maintainer_name_leading_space | |||
611 | @@ -0,0 +1,8 @@ | |||
612 | 1 | <<<<<<< gitubuntu/changelog_tests/maintainer_name_leading_space | ||
613 | 2 | ======= | ||
614 | 3 | testpkg (1.0) xenial; urgency=medium | ||
615 | 4 | |||
616 | 5 | * Sample entry. | ||
617 | 6 | |||
618 | 7 | -- Test Maintainer <test-maintainer@example.com> Thu, 01 Jan 1970 00:00:00 +0000 | ||
619 | 8 | >>>>>>> gitubuntu/changelog_tests/maintainer_name_leading_space | ||
620 | diff --git a/gitubuntu/changelog_tests/maintainer_name_trailing_space b/gitubuntu/changelog_tests/maintainer_name_trailing_space | |||
621 | 0 | new file mode 100644 | 9 | new file mode 100644 |
622 | index 0000000..358b4a3 | |||
623 | --- /dev/null | |||
624 | +++ b/gitubuntu/changelog_tests/maintainer_name_trailing_space | |||
625 | @@ -0,0 +1,8 @@ | |||
626 | 1 | <<<<<<< gitubuntu/changelog_tests/maintainer_name_trailing_space | ||
627 | 2 | ======= | ||
628 | 3 | testpkg (1.0) xenial; urgency=medium | ||
629 | 4 | |||
630 | 5 | * Sample entry. | ||
631 | 6 | |||
632 | 7 | -- Test Maintainer <test-maintainer@example.com> Thu, 01 Jan 1970 00:00:00 +0000 | ||
633 | 8 | >>>>>>> gitubuntu/changelog_tests/maintainer_name_trailing_space | ||
634 | diff --git a/gitubuntu/changelog_tests/test_date_1 b/gitubuntu/changelog_tests/test_date_1 | |||
635 | 0 | new file mode 100644 | 9 | new file mode 100644 |
636 | index 0000000..3c54cec | |||
637 | --- /dev/null | |||
638 | +++ b/gitubuntu/changelog_tests/test_date_1 | |||
639 | @@ -0,0 +1,8 @@ | |||
640 | 1 | <<<<<<< gitubuntu/changelog_tests/test_date_1 | ||
641 | 2 | ======= | ||
642 | 3 | testpkg (1.0) xenial; urgency=medium | ||
643 | 4 | |||
644 | 5 | * Sample entry. | ||
645 | 6 | |||
646 | 7 | -- Test Maintainer <test-maintainer@donotmail.com> Mon, 12 May 2016 08:14:34 -0700 | ||
647 | 8 | >>>>>>> gitubuntu/changelog_tests/test_date_1 | ||
648 | diff --git a/gitubuntu/changelog_tests/test_date_2 b/gitubuntu/changelog_tests/test_date_2 | |||
649 | 0 | new file mode 100644 | 9 | new file mode 100644 |
650 | index 0000000..9defc1f | |||
651 | --- /dev/null | |||
652 | +++ b/gitubuntu/changelog_tests/test_date_2 | |||
653 | @@ -0,0 +1,8 @@ | |||
654 | 1 | <<<<<<< gitubuntu/changelog_tests/test_date_2 | ||
655 | 2 | ======= | ||
656 | 3 | testpkg (1.0) xenial; urgency=medium | ||
657 | 4 | |||
658 | 5 | * Sample entry. | ||
659 | 6 | |||
660 | 7 | -- Test Maintainer <test-maintainer@donotmail.com> Mon, 12 May 2016 08:14:34 -0700 | ||
661 | 8 | >>>>>>> gitubuntu/changelog_tests/test_date_2 | ||
662 | diff --git a/gitubuntu/changelog_tests/test_distribution b/gitubuntu/changelog_tests/test_distribution | |||
663 | 0 | new file mode 100644 | 9 | new file mode 100644 |
664 | index 0000000..099b2b2 | |||
665 | --- /dev/null | |||
666 | +++ b/gitubuntu/changelog_tests/test_distribution | |||
667 | @@ -0,0 +1,8 @@ | |||
668 | 1 | <<<<<<< gitubuntu/changelog_tests/test_distribution | ||
669 | 2 | ======= | ||
670 | 3 | testpkg (1.0) xenial; urgency=medium | ||
671 | 4 | |||
672 | 5 | * Sample entry. | ||
673 | 6 | |||
674 | 7 | -- Test Maintainer <test-maintainer@donotmail.com> Mon, 12 May 2016 08:14:34 -0700 | ||
675 | 8 | >>>>>>> gitubuntu/changelog_tests/test_distribution | ||
676 | diff --git a/gitubuntu/changelog_tests/test_distribution_source_1 b/gitubuntu/changelog_tests/test_distribution_source_1 | |||
677 | 0 | new file mode 100644 | 9 | new file mode 100644 |
678 | index 0000000..dbb4587 | |||
679 | --- /dev/null | |||
680 | +++ b/gitubuntu/changelog_tests/test_distribution_source_1 | |||
681 | @@ -0,0 +1,8 @@ | |||
682 | 1 | <<<<<<< gitubuntu/changelog_tests/test_distribution_source_1 | ||
683 | 2 | ======= | ||
684 | 3 | testpkg (1.0) xenial; urgency=medium | ||
685 | 4 | |||
686 | 5 | * Sample entry. | ||
687 | 6 | |||
688 | 7 | -- Test Maintainer <test-maintainer@donotmail.com> Mon, 12 May 2016 08:14:34 -0700 | ||
689 | 8 | >>>>>>> gitubuntu/changelog_tests/test_distribution_source_1 | ||
690 | diff --git a/gitubuntu/changelog_tests/test_distribution_source_2 b/gitubuntu/changelog_tests/test_distribution_source_2 | |||
691 | 0 | new file mode 100644 | 9 | new file mode 100644 |
692 | index 0000000..04abafe | |||
693 | --- /dev/null | |||
694 | +++ b/gitubuntu/changelog_tests/test_distribution_source_2 | |||
695 | @@ -0,0 +1,8 @@ | |||
696 | 1 | <<<<<<< gitubuntu/changelog_tests/test_distribution_source_2 | ||
697 | 2 | ======= | ||
698 | 3 | testpkg (1.0) zesty-security; urgency=medium | ||
699 | 4 | |||
700 | 5 | * Sample entry. | ||
701 | 6 | |||
702 | 7 | -- Test Maintainer <test-maintainer@donotmail.com> Mon, 12 May 2016 08:14:34 -0700 | ||
703 | 8 | >>>>>>> gitubuntu/changelog_tests/test_distribution_source_2 | ||
704 | diff --git a/gitubuntu/changelog_tests/test_distribution_source_3 b/gitubuntu/changelog_tests/test_distribution_source_3 | |||
705 | 0 | new file mode 100644 | 9 | new file mode 100644 |
706 | index 0000000..f89aa4a | |||
707 | --- /dev/null | |||
708 | +++ b/gitubuntu/changelog_tests/test_distribution_source_3 | |||
709 | @@ -0,0 +1,8 @@ | |||
710 | 1 | <<<<<<< gitubuntu/changelog_tests/test_distribution_source_3 | ||
711 | 2 | ======= | ||
712 | 3 | testpkg (1.0) unstable; urgency=medium | ||
713 | 4 | |||
714 | 5 | * Sample entry. | ||
715 | 6 | |||
716 | 7 | -- Test Maintainer <test-maintainer@donotmail.com> Mon, 12 May 2016 08:14:34 -0700 | ||
717 | 8 | >>>>>>> gitubuntu/changelog_tests/test_distribution_source_3 | ||
718 | diff --git a/gitubuntu/changelog_tests/test_distribution_source_4 b/gitubuntu/changelog_tests/test_distribution_source_4 | |||
719 | 0 | new file mode 100644 | 9 | new file mode 100644 |
720 | index 0000000..8c8fb68 | |||
721 | --- /dev/null | |||
722 | +++ b/gitubuntu/changelog_tests/test_distribution_source_4 | |||
723 | @@ -0,0 +1,8 @@ | |||
724 | 1 | <<<<<<< gitubuntu/changelog_tests/test_distribution_source_4 | ||
725 | 2 | ======= | ||
726 | 3 | testpkg (1.0) devel; urgency=medium | ||
727 | 4 | |||
728 | 5 | * Sample entry. | ||
729 | 6 | |||
730 | 7 | -- Test Maintainer <test-maintainer@donotmail.com> Mon, 12 May 2016 08:14:34 -0700 | ||
731 | 8 | >>>>>>> gitubuntu/changelog_tests/test_distribution_source_4 | ||
732 | diff --git a/gitubuntu/changelog_tests/test_maintainer_1 b/gitubuntu/changelog_tests/test_maintainer_1 | |||
733 | 0 | new file mode 100644 | 9 | new file mode 100644 |
734 | index 0000000..62a89ff | |||
735 | --- /dev/null | |||
736 | +++ b/gitubuntu/changelog_tests/test_maintainer_1 | |||
737 | @@ -0,0 +1,8 @@ | |||
738 | 1 | <<<<<<< gitubuntu/changelog_tests/test_maintainer_1 | ||
739 | 2 | ======= | ||
740 | 3 | testpkg (1.0) xenial; urgency=medium | ||
741 | 4 | |||
742 | 5 | * Sample entry. | ||
743 | 6 | |||
744 | 7 | -- Test Maintainer <test-maintainer@donotmail.com> Thu, 01 Jan 1970 00:00:00 +0000 | ||
745 | 8 | >>>>>>> gitubuntu/changelog_tests/test_maintainer_1 | ||
746 | diff --git a/gitubuntu/changelog_tests/test_maintainer_2 b/gitubuntu/changelog_tests/test_maintainer_2 | |||
747 | 0 | new file mode 100644 | 9 | new file mode 100644 |
748 | index 0000000..7b8db29 | |||
749 | --- /dev/null | |||
750 | +++ b/gitubuntu/changelog_tests/test_maintainer_2 | |||
751 | @@ -0,0 +1,8 @@ | |||
752 | 1 | <<<<<<< gitubuntu/changelog_tests/test_maintainer_2 | ||
753 | 2 | ======= | ||
754 | 3 | testpkg (1.0) xenial; urgency=medium | ||
755 | 4 | |||
756 | 5 | * Sample entry. | ||
757 | 6 | |||
758 | 7 | -- <test-maintainer@donotmail.com> Thu, 01 Jan 1970 00:00:00 +0000 | ||
759 | 8 | >>>>>>> gitubuntu/changelog_tests/test_maintainer_2 | ||
760 | diff --git a/gitubuntu/changelog_tests/test_maintainer_3 b/gitubuntu/changelog_tests/test_maintainer_3 | |||
761 | 0 | new file mode 100644 | 9 | new file mode 100644 |
762 | index 0000000..195564a | |||
763 | --- /dev/null | |||
764 | +++ b/gitubuntu/changelog_tests/test_maintainer_3 | |||
765 | @@ -0,0 +1,8 @@ | |||
766 | 1 | <<<<<<< gitubuntu/changelog_tests/test_maintainer_3 | ||
767 | 2 | ======= | ||
768 | 3 | testpkg (1.0) xenial; urgency=medium | ||
769 | 4 | |||
770 | 5 | * Sample entry. | ||
771 | 6 | |||
772 | 7 | -- <test-maintainer@donotmail.com> Thu, 01 Jan 1970 00:00:00 +0000 | ||
773 | 8 | >>>>>>> gitubuntu/changelog_tests/test_maintainer_3 | ||
774 | diff --git a/gitubuntu/changelog_tests/test_versions_1 b/gitubuntu/changelog_tests/test_versions_1 | |||
775 | 0 | new file mode 100644 | 9 | new file mode 100644 |
776 | index 0000000..ec94bc5 | |||
777 | --- /dev/null | |||
778 | +++ b/gitubuntu/changelog_tests/test_versions_1 | |||
779 | @@ -0,0 +1,8 @@ | |||
780 | 1 | <<<<<<< gitubuntu/changelog_tests/test_versions_1 | ||
781 | 2 | ======= | ||
782 | 3 | testpkg (1.0) xenial; urgency=medium | ||
783 | 4 | |||
784 | 5 | * Sample entry. | ||
785 | 6 | |||
786 | 7 | -- Test Maintainer <test-maintainer@donotmail.com> Mon, 12 May 2016 08:14:34 -0700 | ||
787 | 8 | >>>>>>> gitubuntu/changelog_tests/test_versions_1 | ||
788 | diff --git a/gitubuntu/changelog_tests/test_versions_2 b/gitubuntu/changelog_tests/test_versions_2 | |||
789 | 0 | new file mode 100644 | 9 | new file mode 100644 |
790 | index 0000000..ed6ade9 | |||
791 | --- /dev/null | |||
792 | +++ b/gitubuntu/changelog_tests/test_versions_2 | |||
793 | @@ -0,0 +1,14 @@ | |||
794 | 1 | <<<<<<< gitubuntu/changelog_tests/test_versions_2 | ||
795 | 2 | ======= | ||
796 | 3 | testpkg (2.0) xenial; urgency=medium | ||
797 | 4 | |||
798 | 5 | * Sample entry 2. | ||
799 | 6 | |||
800 | 7 | -- Test Maintainer <test-maintainer@donotmail.com> Mon, 27 Aug 2016 12:10:34 -0700 | ||
801 | 8 | |||
802 | 9 | testpkg (1.0) xenial; urgency=medium | ||
803 | 10 | |||
804 | 11 | * Sample entry 1. | ||
805 | 12 | |||
806 | 13 | -- Test Maintainer <test-maintainer@donotmail.com> Mon, 12 May 2016 08:14:34 -0700 | ||
807 | 14 | >>>>>>> gitubuntu/changelog_tests/test_versions_2 | ||
808 | diff --git a/gitubuntu/changelog_tests/test_versions_3 b/gitubuntu/changelog_tests/test_versions_3 | |||
809 | 0 | new file mode 100644 | 15 | new file mode 100644 |
810 | index 0000000..8fbf944 | |||
811 | --- /dev/null | |||
812 | +++ b/gitubuntu/changelog_tests/test_versions_3 | |||
813 | @@ -0,0 +1,26 @@ | |||
814 | 1 | <<<<<<< gitubuntu/changelog_tests/test_versions_3 | ||
815 | 2 | ======= | ||
816 | 3 | testpkg (4.0) zesty; urgency=medium | ||
817 | 4 | |||
818 | 5 | * Sample entry 4. | ||
819 | 6 | |||
820 | 7 | -- Test Maintainer <test-maintainer@donotmail.com> Mon, 03 Apr 2017 18:04:01 -0700 | ||
821 | 8 | |||
822 | 9 | testpkg (3.0) yakkety; urgency=medium | ||
823 | 10 | |||
824 | 11 | * Sample entry 3. | ||
825 | 12 | |||
826 | 13 | -- Test Maintainer <test-maintainer@donotmail.com> Fri, 10 Nov 2016 03:34:10 -0700 | ||
827 | 14 | |||
828 | 15 | testpkg (2.0) xenial; urgency=medium | ||
829 | 16 | |||
830 | 17 | * Sample entry 2. | ||
831 | 18 | |||
832 | 19 | -- Test Maintainer <test-maintainer@donotmail.com> Sat, 27 Aug 2016 12:10:55 -0700 | ||
833 | 20 | |||
834 | 21 | testpkg (1.0) xenial; urgency=medium | ||
835 | 22 | |||
836 | 23 | * Sample entry 1. | ||
837 | 24 | |||
838 | 25 | -- Test Maintainer <test-maintainer@donotmail.com> Thu, 12 May 2016 08:14:34 -0700 | ||
839 | 26 | >>>>>>> gitubuntu/changelog_tests/test_versions_3 | ||
840 | diff --git a/gitubuntu/clone.py b/gitubuntu/clone.py | |||
841 | 0 | new file mode 100644 | 27 | new file mode 100644 |
842 | index 0000000..aff52be | |||
843 | --- /dev/null | |||
844 | +++ b/gitubuntu/clone.py | |||
845 | @@ -0,0 +1,178 @@ | |||
846 | 1 | <<<<<<< gitubuntu/clone.py | ||
847 | 2 | ======= | ||
848 | 3 | import argparse | ||
849 | 4 | import logging | ||
850 | 5 | import os | ||
851 | 6 | import re | ||
852 | 7 | import shutil | ||
853 | 8 | from subprocess import CalledProcessError | ||
854 | 9 | import sys | ||
855 | 10 | from gitubuntu.__main__ import top_level_defaults | ||
856 | 11 | from gitubuntu.git_repository import ( | ||
857 | 12 | GitUbuntuRepository, | ||
858 | 13 | GitUbuntuRepositoryFetchError, | ||
859 | 14 | ) | ||
860 | 15 | from gitubuntu.run import decode_binary, run | ||
861 | 16 | |||
862 | 17 | import pkg_resources | ||
863 | 18 | import pygit2 | ||
864 | 19 | |||
def copy_hooks(src, dst):
    """Install every git hook file found in *src* into the *dst* directory.

    *dst* is created if it does not already exist; an existing directory
    is reused as-is. Each entry of *src* is copied with its metadata
    preserved (shutil.copy2).
    """
    try:
        os.mkdir(dst)
    except FileExistsError:
        # Destination hooks directory already present -- reuse it.
        pass

    hook_paths = (os.path.join(src, name) for name in os.listdir(src))
    for hook_path in hook_paths:
        shutil.copy2(hook_path, dst)
876 | 31 | |||
def main(
    package,
    directory=None,
    lp_user=None,
    proto=top_level_defaults.proto,
):
    """Entry point to clone subcommand

    @package: Name of source package
    @directory: directory to clone the repository into
    @lp_user: user to authenticate to Launchpad as
    @proto: string protocol to use (one of 'http', 'https', 'git')

    If directory is None, a relative directory with the same name as
    package will be used.

    If lp_user is None, value of `git config gitubuntu.lpuser` will be
    used.

    Returns the resulting GitUbuntuRepository object, if successful;
    None otherwise.
    """
    directory = (
        os.path.abspath(directory)
        if directory
        # os.getcwd() is already absolute; the previous extra abspath()
        # wrapper was redundant
        else os.path.join(os.getcwd(), package)
    )
    if os.path.isdir(directory):
        logging.error('directory %s exists' % directory)
        return None

    local_repo = GitUbuntuRepository(
        local_dir=directory,
        lp_user=lp_user,
        fetch_proto=proto,
    )

    # Install the hooks shipped with the gitubuntu package into the clone.
    copy_hooks(
        pkg_resources.resource_filename(
            'gitubuntu',
            'hooks',
        ),
        os.path.join(
            directory,
            os.getenv('GIT_DIR', '.git'),
            'hooks',
        ),
    )

    local_repo.add_base_remotes(package)
    try:
        local_repo.fetch_base_remotes(verbose=True)
    except GitUbuntuRepositoryFetchError:
        logging.error("Unable to find an imported repository for %s. "
            "Please request an import by e-mailing "
            "ubuntu-distributed-devel@lists.ubuntu.com.",
            package
        )
        # Remove the partial clone so that a retry starts from scratch.
        shutil.rmtree(local_repo.local_dir)
        return None

    local_repo.add_lpuser_remote(pkgname=package)
    logging.debug("added remote '%s' -> %s", local_repo.lp_user,
        local_repo.raw_repo.remotes[local_repo.lp_user].url
    )
    try:
        local_repo.fetch_lpuser_remote(verbose=True)
    except GitUbuntuRepositoryFetchError:
        # The user may not have pushed anything yet; this is not fatal.
        pass

    try:
        local_repo.create_tracking_branch(
            'ubuntu/devel',
            'pkg/ubuntu/devel'
        )
        local_repo.checkout_commitish('ubuntu/devel')
    # Was a bare 'except:', which also swallowed KeyboardInterrupt and
    # SystemExit; narrow it to Exception.
    except Exception:
        logging.error('Unable to checkout ubuntu/devel, does '
            'pkg/ubuntu/devel branch exist?'
        )

    local_repo.git_run(['config', 'notes.displayRef', 'refs/notes/changelog'])

    if os.path.isfile(os.path.join(directory, '.gitignore')):
        logging.warning('A .gitignore file exists in the source '
            'package. This will affect the behavior of git. Consider '
            'backing up the gitignore while working on this package '
            'to ensure all changes are tracked or passing appropriate '
            'flags to git commands (e.g., git status --ignored).'
        )

    return local_repo
969 | 124 | |||
def parse_args(subparsers=None, base_subparsers=None):
    """Build or register the argument parser for the clone subcommand.

    :param subparsers: if provided, register 'clone' as a subcommand on it
    :param base_subparsers: optional parent parsers carrying shared options
    :returns: the parsed arguments when used standalone; otherwise a
        'clone - <description>' summary string for the subcommand listing
    """
    kwargs = dict(
        description='Clone a source package git repository to a directory',
        formatter_class=argparse.RawTextHelpFormatter,
        epilog='''
Example:
 * clone to open-iscsi/
     %(prog)s open-iscsi
 * clone to ubuntu.git
     %(prog)s open-iscsi ubuntu.git
 * use git rather than https protocol for remotes:
     %(prog)s --proto=git open-iscsi
'''
    )
    if base_subparsers:
        kwargs['parents'] = base_subparsers
    if subparsers:
        parser = subparsers.add_parser('clone', **kwargs)
        parser.set_defaults(func=cli_main)
    else:
        parser = argparse.ArgumentParser(**kwargs)
    parser.add_argument('package', type=str,
        help='Name of source package to clone'
    )
    parser.add_argument('directory', type=str,
        # The previous string continuation produced a double space
        # ("a  directory") in the rendered help text.
        help='Local directory to clone to. If not specified, a '
             'directory with the same name as PACKAGE will be '
             'used',
        default=None,
        nargs='?'
    )
    parser.add_argument('-l', '--lp-user', type=str, help=argparse.SUPPRESS)
    if not subparsers:
        return parser.parse_args()
    return 'clone - %s' % kwargs['description']
1005 | 160 | |||
def cli_main(args):
    """Dispatch the clone subcommand from parsed CLI arguments.

    Returns 0 on success and 1 on failure, suitable as a process exit
    status.
    """
    # args may lack lp_user when invoked via a parser that suppressed it.
    lp_user = getattr(args, 'lp_user', None)

    repository = main(
        package=args.package,
        directory=args.directory,
        lp_user=lp_user,
        proto=args.proto,
    )
    return 0 if repository is not None else 1
1020 | 175 | |||
1021 | 176 | |||
1022 | 177 | # vi: ts=4 expandtab | ||
1023 | 178 | >>>>>>> gitubuntu/clone.py | ||
1024 | diff --git a/gitubuntu/git_repository.py b/gitubuntu/git_repository.py | |||
1025 | 0 | new file mode 100644 | 179 | new file mode 100644 |
1026 | index 0000000..9a81860 | |||
1027 | --- /dev/null | |||
1028 | +++ b/gitubuntu/git_repository.py | |||
1029 | @@ -0,0 +1,3026 @@ | |||
1030 | 1 | <<<<<<< gitubuntu/git_repository.py | ||
1031 | 2 | ======= | ||
1032 | 3 | ### XXX: can we reduce number of calls to dpkg-parsechangelog | ||
1033 | 4 | ### XXX: is any of this data in lp already? | ||
1034 | 5 | |||
1035 | 6 | import collections | ||
1036 | 7 | from contextlib import contextmanager | ||
1037 | 8 | from copy import copy | ||
1038 | 9 | import datetime | ||
1039 | 10 | import enum | ||
1040 | 11 | from functools import lru_cache | ||
1041 | 12 | import itertools | ||
1042 | 13 | import logging | ||
1043 | 14 | import os | ||
1044 | 15 | import posixpath | ||
1045 | 16 | import re | ||
1046 | 17 | import shutil | ||
1047 | 18 | import stat | ||
1048 | 19 | from subprocess import CalledProcessError | ||
1049 | 20 | import sys | ||
1050 | 21 | import tempfile | ||
1051 | 22 | from gitubuntu.__main__ import top_level_defaults | ||
1052 | 23 | import gitubuntu.build | ||
1053 | 24 | from gitubuntu.dsc import component_tarball_matches | ||
1054 | 25 | from gitubuntu.patch_state import PatchState | ||
1055 | 26 | from gitubuntu.run import ( | ||
1056 | 27 | decode_binary, | ||
1057 | 28 | run, | ||
1058 | 29 | runq, | ||
1059 | 30 | run_gbp, | ||
1060 | 31 | run_quilt, | ||
1061 | 32 | ) | ||
1062 | 33 | import gitubuntu.spec | ||
1063 | 34 | from gitubuntu.test_util import get_test_changelog | ||
1064 | 35 | import gitubuntu.versioning | ||
1065 | 36 | import debian.changelog | ||
1066 | 37 | import debian.debian_support | ||
1067 | 38 | import pygit2 | ||
1068 | 39 | import pytest | ||
1069 | 40 | |||
1070 | 41 | |||
def _follow_symlinks_to_blob(repo, top_tree_object, search_path,
    _rel_tree=None, _rel_path=''
):
    '''Recursively follow a path down a tree, following symlinks, to find blob

    repo: pygit2.Repository object
    top_tree_object: pygit2.Tree object of the top of the tree structure
    search_path: '/'-separated path string of blob to find
    _rel_tree: (internal) which tree to look further into
    _rel_path: (internal) the path we are in so far

    Raises ValueError if the entry found is neither a tree, a symlink nor a
    regular/executable blob (e.g. a "gitlink" submodule entry).
    Raises KeyError (from pygit2 tree indexing) if a path component does not
    exist in the tree.
    '''

    # Entry modes that represent an ordinary file (with or without the
    # executable bit); symlinks are blobs too but carry GIT_FILEMODE_LINK.
    NORMAL_BLOB_MODES = set([
        pygit2.GIT_FILEMODE_BLOB,
        pygit2.GIT_FILEMODE_BLOB_EXECUTABLE,
    ])

    _rel_tree = _rel_tree or top_tree_object
    head, tail = posixpath.split(search_path)

    # A traditional functional split would put a single entry in head with tail
    # empty, but posixpath.split doesn't necessarily do this. Jiggle it round
    # to make it appear to have traditional semantics.
    if not head:
        head = tail
        tail = None

    entry = _rel_tree[head]
    if entry.type in [pygit2.GIT_OBJ_TREE, 'tree']:
        # Descend one level: resolve the rest of the path inside this subtree.
        return _follow_symlinks_to_blob(
            repo=repo,
            top_tree_object=top_tree_object,
            search_path=tail,
            _rel_tree=repo.get(entry.id),
            _rel_path=posixpath.join(_rel_path, head),
        )
    elif entry.type in [pygit2.GIT_OBJ_BLOB, 'blob'] and entry.filemode == pygit2.GIT_FILEMODE_LINK:
        # Found a symlink. Start again from the top with adjustment for symlink
        # following
        target_tail = [decode_binary(repo.get(entry.id).data)]
        if tail is not None:
            target_tail.append(tail)
        # normpath collapses any '..' components the symlink target introduces
        search_path = posixpath.normpath(
            posixpath.join(_rel_path, *target_tail)
        )
        return _follow_symlinks_to_blob(
            repo=repo,
            top_tree_object=top_tree_object,
            search_path=search_path,
        )
    elif entry.type in [pygit2.GIT_OBJ_BLOB, 'blob'] and entry.filemode in NORMAL_BLOB_MODES:
        return repo.get(entry.id)
    else:
        # Found some special entry such as a "gitlink" (submodule entry)
        raise ValueError(
            "Found %r filemode %r looking for %r" %
            (entry, entry.filemode, posixpath.join(_rel_path, search_path))
        )
1129 | 100 | |||
1130 | 101 | |||
def follow_symlinks_to_blob(repo, treeish_object, path):
    """Resolve path inside treeish_object to a blob, following symlinks.

    :param repo: pygit2.Repository in which to look objects up
    :param treeish_object: pygit2 object that peels to a pygit2.Tree
    :param str path: '/'-separated path of the blob to find
    :returns: the pygit2 blob object the path resolves to
    """
    top_tree = treeish_object.peel(pygit2.Tree)
    normalized_path = posixpath.normpath(path)
    return _follow_symlinks_to_blob(
        repo=repo,
        top_tree_object=top_tree,
        search_path=normalized_path,
    )
1137 | 108 | |||
1138 | 109 | |||
1139 | 110 | def _derive_git_cli_env( | ||
1140 | 111 | pygit2_repo, | ||
1141 | 112 | initial_env=None, | ||
1142 | 113 | update_env=None, | ||
1143 | 114 | work_tree_path=None, | ||
1144 | 115 | index_path=None, | ||
1145 | 116 | ): | ||
1146 | 117 | """Calculate the environment to be used in a call to the git CLI | ||
1147 | 118 | |||
1148 | 119 | :param pygit2.Repository pygit2_repo: the repository for which to calculate | ||
1149 | 120 | the environment | ||
1150 | 121 | :param dict initial_env: the environment to start with | ||
1151 | 122 | :param dict update_env: additional environment setings with which to | ||
1152 | 123 | override the result | ||
1153 | 124 | :param str work_tree_path: in the case of an alternate work tree being | ||
1154 | 125 | used, specify this here and GIT_WORK_TREE will be set to it instead of | ||
1155 | 126 | the default being taken from the work tree used by pygit2_repo | ||
1156 | 127 | :param str index_path: if an alternate index is being used, specify it here | ||
1157 | 128 | and GIT_INDEX_FILE will be set accordingly. | ||
1158 | 129 | :rtype: dict | ||
1159 | 130 | :returns: a dictionary representing the environment with which to call the | ||
1160 | 131 | git CLI | ||
1161 | 132 | |||
1162 | 133 | This function encapsulates the setting of the GIT_DIR, GIT_WORK_TREE and | ||
1163 | 134 | GIT_INDEX_FILE environment variables as necessary. The provided | ||
1164 | 135 | pygit2.Repository instance is used to determine these values. initial_env, | ||
1165 | 136 | if provided, specifies the initial environment to use instead of defaulting | ||
1166 | 137 | to the process' current environment. update_env allows extra environment | ||
1167 | 138 | variables to be added as well as the override of any variables set by this | ||
1168 | 139 | function, including GIT_DIR, GIT_WORK_TREE and GIT_INDEX_FILE. | ||
1169 | 140 | """ | ||
1170 | 141 | if initial_env is None: | ||
1171 | 142 | env = os.environ.copy() | ||
1172 | 143 | else: | ||
1173 | 144 | env = initial_env.copy() | ||
1174 | 145 | |||
1175 | 146 | env['GIT_DIR'] = pygit2_repo.path | ||
1176 | 147 | |||
1177 | 148 | if work_tree_path is None: | ||
1178 | 149 | env['GIT_WORK_TREE'] = pygit2_repo.workdir | ||
1179 | 150 | else: | ||
1180 | 151 | env['GIT_WORK_TREE'] = work_tree_path | ||
1181 | 152 | |||
1182 | 153 | if index_path is not None: | ||
1183 | 154 | env['GIT_INDEX_FILE'] = index_path | ||
1184 | 155 | |||
1185 | 156 | if update_env: | ||
1186 | 157 | env.update(update_env) | ||
1187 | 158 | |||
1188 | 159 | return env | ||
1189 | 160 | |||
1190 | 161 | |||
def _derive_target_branch_string(remote_branch_objects):
    '''Given a list of branch objects, return the name of the one to use as the target branch

    remote_branch_objects: list of pygit2 branch objects (each must have a
        branch_name attribute and peel to a pygit2.Tree)

    Returns either one of the branch objects' names, or the empty string
    to indicate no suitable candidate.

    With multiple candidates, all of their tip trees must match; then
    'ubuntu/devel' is preferred, then a single-series set of branches
    containing a '-devel' branch.
    '''
    if len(remote_branch_objects) == 0:
        logging.error("Unable to automatically determine importer "
            "branch: No candidate branches found."
        )
        return ''
    remote_branch_strings = [
        b.branch_name for b in remote_branch_objects
    ]
    if len(remote_branch_objects) > 1:
        # do the trees of each branch's tip match?
        if len(
            set(b.peel(pygit2.Tree).id for b in remote_branch_objects)
        ) != 1:
            logging.error("Unable to automatically determine importer "
                "branch: Multiple candidate branches found and "
                "their trees do not match: %s. This might be a "
                "bug in `git ubuntu lint`, please report it at "
                "https://bugs.launchpad.net/git-ubuntu. "
                "Please pass --target-branch.",
                ", ".join(remote_branch_strings)
            )
            return ''
        # is ubuntu/devel one of the candidates?
        try:
            return [
                b for b in remote_branch_strings if 'ubuntu/devel' in b
            ].pop()
        except IndexError:
            pass
        # are all candidate branches for the same series?
        pkg_remote_branch_serieses = set(
            # remove the prefix, trim the distribution and
            # extract the series
            b[len('pkg/'):].split('/')[1].split('-')[0] for
            b in remote_branch_strings
        )
        if len(pkg_remote_branch_serieses) != 1:
            logging.error("Unable to automatically determine importer "
                "branch: Multiple candidate branches found and "
                "they do not target the same series: %s. Please pass "
                "--target-branch.", ", ".join(remote_branch_strings)
            )
            return ''
        # is a -devel branch present?
        if not any('-devel' in b for b in remote_branch_strings):
            logging.error("Unable to automatically determine importer "
                "branch: Multiple candidate branches found and "
                "none appear to be a -devel branch: %s. Please "
                "pass --target-branch.", ", ".join(remote_branch_strings)
            )
            return ''
        # if so, favor -devel
        remote_branch_strings = [
            b for b in remote_branch_strings if '-devel' in b
        ]
    # Single candidate (or the filtered -devel set): return one name.
    return remote_branch_strings.pop()
1253 | 224 | |||
def derive_target_branch(repo, commitish_string, namespace='pkg'):
    """Derive the importer target branch name nearest to a commitish.

    :param repo: repository object providing nearest_remote_branches()
    :param str commitish_string: commitish to find candidate branches for
    :param str namespace: remote-branch namespace to search (default 'pkg')
    :returns: a branch name string, or '' if no suitable candidate exists
    """
    candidates = repo.nearest_remote_branches(commitish_string, namespace)
    return _derive_target_branch_string(candidates)
1258 | 229 | |||
1259 | 230 | |||
def git_run(
    pygit2_repo,
    args,
    initial_env=None,
    update_env=None,
    work_tree_path=None,
    index_path=None,
    **kwargs
):
    """Run the git CLI with the provided arguments

    :param pygit2.Repository: the repository on which to act
    :param list(str) args: arguments to the git CLI
    :param dict initial_env: the environment to use; defaults to the current
        process environment
    :param dict update_env: additional environment variables and overrides,
        applied last (may override GIT_DIR, GIT_WORK_TREE, GIT_INDEX_FILE)
    :param str work_tree_path: alternate work tree to use instead of the
        repository's own
    :param str index_path: alternate index file to use
    :param dict **kwargs: further arguments to pass through to
        gitubuntu.run.run()
    :raises subprocess.CalledProcessError: if git exits non-zero
    :rtype: (str, str)
    :returns: stdout and stderr strings containing the subprocess output

    GIT_DIR, GIT_WORK_TREE and GIT_INDEX_FILE are derived automatically from
    the repository via _derive_git_cli_env().
    """
    cli_env = _derive_git_cli_env(
        pygit2_repo=pygit2_repo,
        initial_env=initial_env,
        update_env=update_env,
        work_tree_path=work_tree_path,
        index_path=index_path,
    )
    command = ['git'] + list(args)
    return run(command, env=cli_env, **kwargs)
1299 | 270 | |||
1300 | 271 | |||
class RenameableDir:
    """An on-disk directory that can be renamed and traversed recursively.

    This is a thin wrapper around a filesystem path string (and must be
    instantiated with one). Methods and attributes are modeled around a
    py.path, but we do not use py.path as we don't really need its
    functionality and it would add another dependency. This interface allows
    for filesystem operations to be easily faked with a FakeRenameableDir for
    testing consumers of this class.

    One wart around renaming and py.path is that once renamed a py.path object
    becomes useless as it no longer validly refers to an on-disk path. Rather
    than supporting a rename method, this wrapper provides a basename
    setter to handle the rename and replacement wrapped string object
    transparently. This moves complexity away from the class consumer, allowing
    the consumer to be tested more easily.

    Since the underlying purpose of this class is to handle manipulations of a
    directory tree for adjustments needed during import/export, symlink
    handling is effectively "turned off" in the specification of this class.
    Symlinks to directories are not recursed into; they are handled no
    differently to a regular file, in the same manner as lstat(2).
    """
    def __init__(self, path):
        """Create a new RenameableDir instance.

        :param str path: the on-disk directory to wrap, which must exist. For
            symlinks, it is the symlink itself that must exist; the existence
            of a symlink's target does not matter.
        :raises FileNotFoundError: if the path supplied does not exist.
        """
        # Ignore the return value of os.lstat(); this call is used to raise
        # FileNotFoundError if the path does not exist (as required by the
        # docstring), or succeed otherwise. os.path.lexists() would use the
        # same underlying system call anyway, so this is equivalent and we
        # get a fully-populated FileNotFoundError for free.
        os.lstat(path)

        self._path = path

    @property
    def basename(self):
        """The name of the directory itself."""
        return os.path.basename(self._path)

    @basename.setter
    def basename(self, new_basename):
        """Rename this directory on disk and track the new path."""
        renamed_path = os.path.join(os.path.dirname(self._path), new_basename)
        os.rename(self._path, renamed_path)
        self._path = renamed_path

    def listdir(self, fil=lambda x: True):
        """Return subdirectory objects.

        :param fil: a function that, given a basename, returns a boolean
            indicating whether or not the corresponding object should be
            returned in the results.
        """
        return [
            RenameableDir(os.path.join(self._path, p))
            for p in os.listdir(self._path)
            if fil(p)
        ]

    @property
    def recursive(self):
        """Indicate if this object can contain subdirectory objects.

        An object representing a file will return False. An object representing
        a directory will return True, even if it is empty.

        Symlinks return False even if they point to a directory. Broken
        symlinks also always return False.

        :rtype: bool
        """
        # follow_symlinks=False: treat a symlink as the link itself (lstat
        # semantics), so symlinks-to-directories are not recursed into.
        st = os.stat(self._path, follow_symlinks=False)
        return stat.S_ISDIR(st.st_mode)

    def __str__(self):
        return str(self._path)

    def __repr__(self):
        return 'RenameableDir(%r)' % str(self)

    def __hash__(self):
        # https://stackoverflow.com/q/2909106/478206
        return hash((
            type(self),
            self._path
        ))

    def __eq__(self, other):
        # Compare the wrapped path directly rather than comparing hashes:
        # equal hashes do not imply equal values, so the previous
        # hash-based comparison could (rarely) report two distinct paths
        # as equal on a hash collision.
        if not isinstance(other, RenameableDir):
            return NotImplemented
        return self._path == other._path
1398 | 369 | |||
1399 | 370 | |||
class FakeRenameableDir:
    """A fake RenameableDir that retains its structure in memory.

    This is useful for testing consumers of a RenameableDir.

    In addition, renames are recorded and those records passed up to parent
    FakeRenameableDir objects so that the order of renames that occur can be
    checked later.
    """
    def __init__(self, basename, subdirs):
        """Create a new FakeRenameableDir instance.

        :param str basename: the basename of this instance.
        :param subdirs: FakeRenameableDir objects contained within this one.
            For non-recursive objects (such as those intended to represent
            files), use None.
        :type subdirs: list(FakeRenameableDir)
        """
        self._basename = basename
        self._subdirs = subdirs

        # Wire up parent links so renames can propagate upwards.
        if self._subdirs:
            for subdir in self._subdirs:
                subdir._parent = self

        self._parent = None
        self._rename_record = []

    @property
    def basename(self):
        return self._basename

    @basename.setter
    def basename(self, new_basename):
        # Record before mutating, matching the on-disk class's observable
        # ordering of rename events.
        self._record_rename(self)
        self._basename = new_basename

    def _record_rename(self, obj):
        """Append obj to this record and propagate to the parent chain."""
        self._rename_record.append(obj)
        if self._parent:
            self._parent._record_rename(obj)

    def listdir(self, fil=lambda x: True):
        return (subdir for subdir in self._subdirs if fil(subdir.basename))

    @property
    def recursive(self):
        return self._subdirs is not None

    def __hash__(self):
        # https://stackoverflow.com/q/2909106/478206
        return hash((
            type(self),
            self.basename,
            None if self._subdirs is None else tuple(self._subdirs),
        ))

    def __eq__(self, other):
        # Compare structure directly rather than comparing hashes: equal
        # hashes do not imply equal values, so the previous hash-based
        # comparison could (rarely) report two different trees as equal.
        if not isinstance(other, FakeRenameableDir):
            return NotImplemented
        return (
            self._basename == other._basename
            and self._subdirs == other._subdirs
        )

    def __repr__(self):
        return 'FakeRenameableDir(%r, %r)' % (self.basename, self._subdirs)
1462 | 433 | |||
1463 | 434 | |||
# Matcher for '.git' with one or more leading dots ('.git', '..git',
# '...git', ...); used as a listdir filter when escaping/unescaping tree
# entries below. The pattern is anchored, so .search acts as a full match.
_dot_git_match = re.compile(r'^\.+git$').search
# Direction selector for _escape_unescape_dot_git.
_EscapeDirection = enum.Enum('EscapeDirection', ['ESCAPE', 'UNESCAPE'])
1466 | 437 | |||
1467 | 438 | |||
def _escape_unescape_dot_git(path, direction):
    """Escape or unescape .git entries in a directory recursively.

    :param RenameableDir path: top of directory tree to escape or unescape.
    :param _EscapeDirection direction: whether to escape or unescape.

    Escaping rules:
        .git -> ..git
        ..git -> ...git
        ...git -> ....git
        etc.

    All these escaping rules apply all of the time, regardless of whether
    or not .git exists. Only names matching '.git' with zero or more '.'
    prepended are touched.

    This allows any directory tree to be losslessly stored in git, since git
    does not permit entries named '.git'.

    Unescaping is the inverse of escaping. Before unescaping, an entry called
    '.git' must not exist. If it does, RuntimeError is raised, and the
    directory is left in an undefined (probably partially unescaped) state.
    """
    # When escaping, we have to rename ..git to ...git before renaming .git to
    # ..git in order to make room, and the reverse for unescaping. If we do the
    # renames ordered by length of name, we can meet this requirement.
    # Escaping: order by longest first; unescaping: order by shortest first.
    sorted_subpaths_to_rename = sorted(
        # _dot_git_match filters by basename string (listdir's fil contract)
        path.listdir(fil=_dot_git_match),
        key=lambda p: len(p.basename),
        reverse=direction is _EscapeDirection.ESCAPE,
    )
    for entry in sorted_subpaths_to_rename:
        if direction is _EscapeDirection.ESCAPE:
            # Add a leading '.'
            entry.basename = '.' + entry.basename
        else:
            assert direction is _EscapeDirection.UNESCAPE
            if entry.basename == '.git':
                raise RuntimeError(
                    "%s exists but is invalid when unescaping" % entry,
                )
            # Drop the leading '.'
            assert entry.basename[0] == '.'
            entry.basename = entry.basename[1:]

    # Traverse the entire directory for recursive escapes;
    # sorted_subpaths_to_rename is already filtered so is not complete by
    # itself
    for entry in path.listdir():
        if entry.recursive:
            _escape_unescape_dot_git(entry, direction=direction)
1520 | 491 | |||
1521 | 492 | |||
def escape_dot_git(path):
    """Apply .git escaping to a filesystem path.

    :param str path: path to filesystem to change
    :raises FileNotFoundError: if path does not exist
    """
    tree_root = RenameableDir(path)
    return _escape_unescape_dot_git(
        path=tree_root,
        direction=_EscapeDirection.ESCAPE,
    )
1531 | 502 | |||
1532 | 503 | |||
def unescape_dot_git(path):
    """Unapply .git escaping to a filesystem path.

    :param str path: path to filesystem to change
    :raises FileNotFoundError: if path does not exist

    Any entry (including recursively) called '.git' in path is an error and
    will raise a RuntimeError. If an exception is raised, path may be left in
    a partially unescaped state.
    """
    tree_root = RenameableDir(path)
    return _escape_unescape_dot_git(
        path=tree_root,
        direction=_EscapeDirection.UNESCAPE,
    )
1546 | 517 | |||
1547 | 518 | |||
class ChangelogError(Exception):
    """Raised when debian/changelog data handling fails (e.g. when parsed
    and shell-extracted version strings disagree)."""
1550 | 521 | |||
1551 | 522 | class Changelog: | ||
1552 | 523 | '''Representation of a debian/changelog file found inside a git tree-ish | ||
1553 | 524 | |||
1554 | 525 | Uses dpkg-parsechangelog for parsing, but when this fails we fall | ||
1555 | 526 | back to grep/sed-based pattern matching automatically. | ||
1556 | 527 | ''' | ||
1557 | 528 | def __init__(self, content_bytes): | ||
1558 | 529 | ''' | ||
1559 | 530 | contents: bytes string of file contents | ||
1560 | 531 | ''' | ||
1561 | 532 | self._contents = content_bytes | ||
1562 | 533 | try: | ||
1563 | 534 | self._changelog = debian.changelog.Changelog( | ||
1564 | 535 | self._contents, | ||
1565 | 536 | strict=True | ||
1566 | 537 | ) | ||
1567 | 538 | if not len(self._changelog.versions): | ||
1568 | 539 | # assume bad read, so fall back to shell later | ||
1569 | 540 | self._changelog = None | ||
1570 | 541 | except ( | ||
1571 | 542 | UnicodeDecodeError, | ||
1572 | 543 | ValueError, | ||
1573 | 544 | debian.changelog.ChangelogParseError | ||
1574 | 545 | ): | ||
1575 | 546 | self._changelog = None | ||
1576 | 547 | |||
1577 | 548 | @classmethod | ||
1578 | 549 | def from_treeish(cls, repo, treeish_object): | ||
1579 | 550 | ''' | ||
1580 | 551 | repo: pygit2.Repository instance | ||
1581 | 552 | treeish_object: pygit2.Object subclass instance (must peel to pygit2.Tree) | ||
1582 | 553 | ''' | ||
1583 | 554 | blob = follow_symlinks_to_blob( | ||
1584 | 555 | repo=repo, | ||
1585 | 556 | treeish_object=treeish_object, | ||
1586 | 557 | path='debian/changelog' | ||
1587 | 558 | ) | ||
1588 | 559 | return cls(blob.data) | ||
1589 | 560 | |||
1590 | 561 | @classmethod | ||
1591 | 562 | def from_path(cls, path): | ||
1592 | 563 | with open(path, 'rb') as f: | ||
1593 | 564 | return cls(f.read()) | ||
1594 | 565 | |||
1595 | 566 | @lru_cache() | ||
1596 | 567 | def _dpkg_parsechangelog(self, parse_params): | ||
1597 | 568 | stdout, _ = run( | ||
1598 | 569 | 'dpkg-parsechangelog -l- %s' % parse_params, | ||
1599 | 570 | input=self._contents, | ||
1600 | 571 | shell=True, | ||
1601 | 572 | verbose_on_failure=False, | ||
1602 | 573 | ) | ||
1603 | 574 | return stdout.strip() | ||
1604 | 575 | |||
1605 | 576 | @lru_cache() | ||
1606 | 577 | def _shell(self, cmd): | ||
1607 | 578 | stdout, _ = run( | ||
1608 | 579 | cmd, | ||
1609 | 580 | input=self._contents, | ||
1610 | 581 | shell=True, | ||
1611 | 582 | verbose_on_failure=False, | ||
1612 | 583 | ) | ||
1613 | 584 | return stdout.strip() | ||
1614 | 585 | |||
1615 | 586 | @property | ||
1616 | 587 | def _shell_version(self): | ||
1617 | 588 | parse_params = '-n1 -SVersion' | ||
1618 | 589 | shell_cmd = "grep -m1 '^\\S' | sed 's/.*(\\(.*\\)).*/\\1/'" | ||
1619 | 590 | try: | ||
1620 | 591 | raw_out = self._dpkg_parsechangelog(parse_params) | ||
1621 | 592 | except CalledProcessError: | ||
1622 | 593 | raw_out = self._shell(shell_cmd) | ||
1623 | 594 | return None if raw_out == '' else raw_out | ||
1624 | 595 | |||
1625 | 596 | @property | ||
1626 | 597 | def upstream_version(self): | ||
1627 | 598 | if self._changelog: | ||
1628 | 599 | return self._changelog.upstream_version | ||
1629 | 600 | version = self._shell_version | ||
1630 | 601 | m = debian.debian_support.Version.re_valid_version.match(version) | ||
1631 | 602 | if m is None: | ||
1632 | 603 | raise ValueError("Invalid version string: %s", version) | ||
1633 | 604 | return m.group('upstream_version') | ||
1634 | 605 | |||
1635 | 606 | @property | ||
1636 | 607 | def version(self): | ||
1637 | 608 | if self._changelog: | ||
1638 | 609 | try: | ||
1639 | 610 | ret = str(self._changelog.versions[0]).strip() | ||
1640 | 611 | shell_version = self._shell_version | ||
1641 | 612 | if shell_version != 'unknown' and ret != shell_version: | ||
1642 | 613 | raise ChangelogError( | ||
1643 | 614 | 'Old (%s) and new (%s) changelog values do not agree' % | ||
1644 | 615 | (self._shell_version, ret) | ||
1645 | 616 | ) | ||
1646 | 617 | return ret | ||
1647 | 618 | except IndexError: | ||
1648 | 619 | return None | ||
1649 | 620 | return self._shell_version | ||
1650 | 621 | |||
1651 | 622 | @property | ||
1652 | 623 | def _shell_previous_version(self): | ||
1653 | 624 | parse_params = '-n1 -o1 -SVersion' | ||
1654 | 625 | shell_cmd = "grep -m1 '^\\S' | tail -1 | sed 's/.*(\\(.*\\)).*/\\1/'" | ||
1655 | 626 | try: | ||
1656 | 627 | raw_out = self._dpkg_parsechangelog(parse_params) | ||
1657 | 628 | except CalledProcessError: | ||
1658 | 629 | raw_out = self._shell(shell_cmd) | ||
1659 | 630 | return None if raw_out == '' else raw_out | ||
1660 | 631 | |||
1661 | 632 | @property | ||
1662 | 633 | def previous_version(self): | ||
1663 | 634 | if self._changelog: | ||
1664 | 635 | try: | ||
1665 | 636 | ret = str(self._changelog.versions[1]).strip() | ||
1666 | 637 | if ret != self._shell_previous_version: | ||
1667 | 638 | raise ChangelogError( | ||
1668 | 639 | 'Old (%s) and new (%s) changelog values do not agree' % | ||
1669 | 640 | (self._shell_previous_version, ret) | ||
1670 | 641 | ) | ||
1671 | 642 | return ret | ||
1672 | 643 | except IndexError: | ||
1673 | 644 | return None | ||
1674 | 645 | return self._shell_previous_version | ||
1675 | 646 | |||
1676 | 647 | @property | ||
1677 | 648 | def _shell_maintainer(self): | ||
1678 | 649 | parse_params = '-SMaintainer' | ||
1679 | 650 | shell_cmd = "grep -m1 '^ --' | sed 's/ -- \\(.*\\) \\(.*\\)/\\1/'" | ||
1680 | 651 | try: | ||
1681 | 652 | return self._dpkg_parsechangelog(parse_params) | ||
1682 | 653 | except CalledProcessError: | ||
1683 | 654 | return self._shell(shell_cmd) | ||
1684 | 655 | |||
1685 | 656 | @property | ||
1686 | 657 | def maintainer(self): | ||
1687 | 658 | if self._changelog: | ||
1688 | 659 | ret = self._changelog.author.strip() | ||
1689 | 660 | if ret != self._shell_maintainer: | ||
1690 | 661 | raise ChangelogError( | ||
1691 | 662 | 'Old (%s) and new (%s) changelog values do not agree' % | ||
1692 | 663 | (self._shell_maintainer, ret) | ||
1693 | 664 | ) | ||
1694 | 665 | else: | ||
1695 | 666 | ret = self._shell_maintainer | ||
1696 | 667 | if not ret: | ||
1697 | 668 | raise ValueError("Unable to parse maintainer from changelog") | ||
1698 | 669 | return ret | ||
1699 | 670 | |||
1700 | 671 | @property | ||
1701 | 672 | def _shell_date(self): | ||
1702 | 673 | parse_params = '-SDate' | ||
1703 | 674 | shell_cmd = "grep -m1 '^ --' | sed 's/ -- \\(.*\\) \\(.*\\)/\\2/'" | ||
1704 | 675 | try: | ||
1705 | 676 | return self._dpkg_parsechangelog(parse_params) | ||
1706 | 677 | except CalledProcessError: | ||
1707 | 678 | return self._shell(shell_cmd) | ||
1708 | 679 | |||
1709 | 680 | @property | ||
1710 | 681 | def date(self): | ||
1711 | 682 | if self._changelog: | ||
1712 | 683 | ret = self._changelog.date.strip() | ||
1713 | 684 | if ret != self._shell_date: | ||
1714 | 685 | raise ChangelogError( | ||
1715 | 686 | 'Old (%s) and new (%s) changelog values do not agree' % | ||
1716 | 687 | (self._shell_date, ret) | ||
1717 | 688 | ) | ||
1718 | 689 | return ret | ||
1719 | 690 | return self._shell_date | ||
1720 | 691 | |||
1721 | 692 | @property | ||
1722 | 693 | def _shell_all_versions(self): | ||
1723 | 694 | parse_params = '--format rfc822 -SVersion --all' | ||
1724 | 695 | shell_cmd = "grep '^\\S' | sed 's/.*(\\(.*\\)).*/\\1/'" | ||
1725 | 696 | try: | ||
1726 | 697 | version_lines = self._dpkg_parsechangelog(parse_params) | ||
1727 | 698 | except CalledProcessError: | ||
1728 | 699 | version_lines = self._shell(shell_cmd) | ||
1729 | 700 | return [ | ||
1730 | 701 | v_stripped | ||
1731 | 702 | for v_stripped in ( | ||
1732 | 703 | v.strip() for v in version_lines.splitlines() | ||
1733 | 704 | ) | ||
1734 | 705 | if v_stripped | ||
1735 | 706 | ] | ||
1736 | 707 | |||
1737 | 708 | @property | ||
1738 | 709 | def all_versions(self): | ||
1739 | 710 | if self._changelog: | ||
1740 | 711 | ret = [str(v).strip() for v in self._changelog.versions] | ||
1741 | 712 | shell_all_versions = self._shell_all_versions | ||
1742 | 713 | is_equivalent = ( | ||
1743 | 714 | len(ret) == len(shell_all_versions) and | ||
1744 | 715 | all( | ||
1745 | 716 | shell_version == 'unknown' or shell_version == api_version | ||
1746 | 717 | for shell_version, api_version | ||
1747 | 718 | in zip(shell_all_versions, ret) | ||
1748 | 719 | ) | ||
1749 | 720 | ) | ||
1750 | 721 | if not is_equivalent: | ||
1751 | 722 | raise ChangelogError( | ||
1752 | 723 | "Old and new changelog values do not agree" | ||
1753 | 724 | ) | ||
1754 | 725 | return ret | ||
1755 | 726 | else: | ||
1756 | 727 | return self._shell_all_versions | ||
1757 | 728 | |||
1758 | 729 | @property | ||
1759 | 730 | def _shell_distribution(self): | ||
1760 | 731 | parse_params = '-SDistribution' | ||
1761 | 732 | shell_cmd = "grep -m1 '^\\S' | sed 's/.*\\ .*\\ \\(.*\\);.*/\\1/'" | ||
1762 | 733 | try: | ||
1763 | 734 | return self._dpkg_parsechangelog(parse_params) | ||
1764 | 735 | except CalledProcessError: | ||
1765 | 736 | return self._shell(shell_cmd) | ||
1766 | 737 | |||
1767 | 738 | @property | ||
1768 | 739 | def distribution(self): | ||
1769 | 740 | if self._changelog: | ||
1770 | 741 | ret = self._changelog.distributions | ||
1771 | 742 | if ret != self._shell_distribution: | ||
1772 | 743 | raise ChangelogError( | ||
1773 | 744 | 'Old (%s) and new (%s) changelog values do not agree' % | ||
1774 | 745 | (self._shell_distribution, ret) | ||
1775 | 746 | ) | ||
1776 | 747 | return ret | ||
1777 | 748 | return self._shell_distribution | ||
1778 | 749 | |||
1779 | 750 | @property | ||
1780 | 751 | def _shell_srcpkg(self): | ||
1781 | 752 | parse_params = '-SSource' | ||
1782 | 753 | shell_cmd = "grep -m1 '^\\S' | sed 's/\\(.*\\)\\ .*\\ .*;.*/\\1/'" | ||
1783 | 754 | try: | ||
1784 | 755 | return self._dpkg_parsechangelog(parse_params) | ||
1785 | 756 | except CalledProcessError: | ||
1786 | 757 | return self._shell(shell_cmd) | ||
1787 | 758 | |||
1788 | 759 | @property | ||
1789 | 760 | def srcpkg(self): | ||
1790 | 761 | if self._changelog: | ||
1791 | 762 | ret = self._changelog.package.strip() | ||
1792 | 763 | if ret != self._shell_srcpkg: | ||
1793 | 764 | raise ChangelogError( | ||
1794 | 765 | 'Old (%s) and new (%s) changelog values do not agree' % | ||
1795 | 766 | (self._shell_srcpkg, ret) | ||
1796 | 767 | ) | ||
1797 | 768 | return ret | ||
1798 | 769 | return self._shell_srcpkg | ||
1799 | 770 | |||
1800 | 771 | @staticmethod | ||
1801 | 772 | def _parse_changelog_date(changelog_timestamp_string): | ||
1802 | 773 | """Convert changelog timestamp into datetime object | ||
1803 | 774 | |||
1804 | 775 | This function currently requires the locale to have been set to C.UTF-8 | ||
1805 | 776 | by the caller. This would typically be done at the main entry point to | ||
1806 | 777 | the importer. | ||
1807 | 778 | |||
1808 | 779 | :param str changelog_timestamp_string: the timestamp part of the the | ||
1809 | 780 | signoff line from a changelog entry | ||
1810 | 781 | :rtype: datetime.datetime | ||
1811 | 782 | :returns: the timestamp as a datetime object | ||
1812 | 783 | :raises ValueError: if the string could not be parsed | ||
1813 | 784 | """ | ||
1814 | 785 | # We avoid using something like dateutil.parser here because the | ||
1815 | 786 | # parsing behaviour of malformed or unusually formatted dates must be | ||
1816 | 787 | # precisely as specified and not ever change behaviour. If it did, then | ||
1817 | 788 | # imports would no longer be reproducible. | ||
1818 | 789 | # | ||
1819 | 790 | # However, adding new a form of parsing an unambigious date is | ||
1820 | 791 | # acceptable if the spec is first updated accordingly since that would | ||
1821 | 792 | # only introduce new imports that would have previously failed. | ||
1822 | 793 | # | ||
1823 | 794 | # time.strptime ignores time zones, so we must use datetime.strptime() | ||
1824 | 795 | |||
1825 | 796 | # strptime doesn't support anything other than standard locale names | ||
1826 | 797 | # for days of the week, so handle the "Thur" abbreviation as a special | ||
1827 | 798 | # case as defined in the spec as it is unambiguous. | ||
1828 | 799 | adjusted_changelog_timestamp_string = re.sub( | ||
1829 | 800 | r'^Thur,', | ||
1830 | 801 | 'Thu,', | ||
1831 | 802 | changelog_timestamp_string, | ||
1832 | 803 | ) | ||
1833 | 804 | |||
1834 | 805 | acceptable_date_formats = [ | ||
1835 | 806 | '%a, %d %b %Y %H:%M:%S %z', # standard | ||
1836 | 807 | '%A, %d %b %Y %H:%M:%S %z', # full day of week | ||
1837 | 808 | '%d %b %Y %H:%M:%S %z', # missing day of week | ||
1838 | 809 | '%a, %d %B %Y %H:%M:%S %z', # full month name | ||
1839 | 810 | '%A, %d %B %Y %H:%M:%S %z', # full day of week and month name | ||
1840 | 811 | '%d %B %Y %H:%M:%S %z', # missing day of week with full month | ||
1841 | 812 | # name | ||
1842 | 813 | ] | ||
1843 | 814 | for date_format in acceptable_date_formats: | ||
1844 | 815 | try: | ||
1845 | 816 | return datetime.datetime.strptime( | ||
1846 | 817 | adjusted_changelog_timestamp_string, | ||
1847 | 818 | date_format, | ||
1848 | 819 | ) | ||
1849 | 820 | except ValueError: | ||
1850 | 821 | pass | ||
1851 | 822 | else: | ||
1852 | 823 | raise ValueError( | ||
1853 | 824 | "Could not parse date %r" % changelog_timestamp_string, | ||
1854 | 825 | ) | ||
1855 | 826 | |||
1856 | 827 | def git_authorship(self, author_date=None): | ||
1857 | 828 | """Extract last changelog entry's maintainer and timestamp | ||
1858 | 829 | |||
1859 | 830 | Parse the first changelog entry's sign-off line into git's commit | ||
1860 | 831 | authorship metadata model according to the import specification. | ||
1861 | 832 | |||
1862 | 833 | :param datetime.datetime author_date: overrides the author date | ||
1863 | 834 | normally parsed from the changelog entry (i.e. for handling date | ||
1864 | 835 | parsing edge cases). Any sub-second part of the timestamp is | ||
1865 | 836 | truncated. | ||
1866 | 837 | :rtype: tuple(str, str, int, int) | ||
1867 | 838 | :returns: tuple of name, email, time (in seconds since epoch) and | ||
1868 | 839 | offset from UTC (in minutes) | ||
1869 | 840 | :raises ValueError: if the changelog sign-off line cannot be parsed | ||
1870 | 841 | """ | ||
1871 | 842 | m = re.match(r'(?P<name>.*)<+(?P<email>.*?)>+', self.maintainer) | ||
1872 | 843 | if m is None: | ||
1873 | 844 | raise ValueError('Cannot get authorship') | ||
1874 | 845 | |||
1875 | 846 | author_epoch_seconds, author_tz_offset = datetime_to_signature_spec( | ||
1876 | 847 | self._parse_changelog_date(self.date) | ||
1877 | 848 | if author_date is None | ||
1878 | 849 | else author_date | ||
1879 | 850 | ) | ||
1880 | 851 | |||
1881 | 852 | return ( | ||
1882 | 853 | # If the author name is empty, then it must be | ||
1883 | 854 | # EMPTY_GIT_AUTHOR_NAME because git will not accept an empty author | ||
1884 | 855 | # name. See the specification for details. | ||
1885 | 856 | ( | ||
1886 | 857 | m.group('name').strip() | ||
1887 | 858 | or gitubuntu.spec.EMPTY_GIT_AUTHOR_NAME | ||
1888 | 859 | ), | ||
1889 | 860 | m.group('email'), | ||
1890 | 861 | author_epoch_seconds, | ||
1891 | 862 | author_tz_offset, | ||
1892 | 863 | ) | ||
1893 | 864 | |||
1894 | 865 | |||
class GitUbuntuChangelogError(Exception):
    """Error raised for git-ubuntu changelog handling problems."""
1897 | 868 | |||
class PristineTarError(Exception):
    """Base class for pristine-tar related errors."""
1900 | 871 | |||
class PristineTarNotFoundError(PristineTarError):
    """Raised when no suitable pristine-tar tarball is found."""
1903 | 874 | |||
class MultiplePristineTarFoundError(PristineTarError):
    """Raised when multiple distinct suitable tarballs are found."""
1906 | 877 | |||
1907 | 878 | |||
def git_dep14_tag(version):
    """Munge a version string according to http://dep.debian.net/deps/dep14/

    :param version: version string (or any object convertible with str())
    :rtype: str
    :returns: the version munged for use in a git ref name
    """
    version = str(version)
    version = version.replace('~', '_')
    version = version.replace(':', '%')
    # Bug fix: a single replace() pass can leave '..' behind (e.g.
    # '1...2' -> '1.#..2'), which is invalid in a git ref name; repeat
    # until no '..' remains.  Single-pass results are unchanged.
    while '..' in version:
        version = version.replace('..', '.#.')
    if version.endswith('.'):
        version = version + '#'
    if version.endswith('.lock'):
        # Bug fix: str.partition('.lock') splits at the *first*
        # occurrence, discarding anything after an earlier '.lock'
        # (e.g. 'v.lock.lock' -> 'v.#lock'); strip only the trailing
        # suffix instead.
        version = version[:-len('.lock')] + '.#lock'
    return version
1920 | 891 | |||
def import_tag(version, namespace, patch_state=PatchState.UNAPPLIED):
    """Return the tag name recording an import of the given version.

    :param version: package version to encode in the tag
    :param str namespace: ref namespace prefix
    :param PatchState patch_state: selects 'import' (unapplied) or
        'applied' in the tag path
    """
    kind = {
        PatchState.UNAPPLIED: 'import',
        PatchState.APPLIED: 'applied',
    }[patch_state]
    return '/'.join([namespace, kind, git_dep14_tag(version)])
1930 | 901 | |||
def reimport_tag_prefix(version, namespace, patch_state=PatchState.UNAPPLIED):
    """Return the common prefix shared by reimport tags of a version.

    :param version: package version to encode in the tag
    :param str namespace: ref namespace prefix
    :param PatchState patch_state: selects 'import' (unapplied) or
        'applied' in the tag path
    """
    kind = {
        PatchState.UNAPPLIED: 'import',
        PatchState.APPLIED: 'applied',
    }[patch_state]
    return '/'.join([namespace, 'reimport', kind, git_dep14_tag(version)])
1940 | 911 | |||
def reimport_tag(
    version,
    namespace,
    reimport,
    patch_state=PatchState.UNAPPLIED,
):
    """Return the tag name for one specific reimport of a version.

    :param version: package version to encode in the tag
    :param str namespace: ref namespace prefix
    :param reimport: reimport counter/identifier appended to the prefix
    :param PatchState patch_state: selects 'import' or 'applied'
    """
    prefix = reimport_tag_prefix(version, namespace, patch_state=patch_state)
    return '%s/%s' % (prefix, reimport)
1951 | 922 | |||
def upload_tag(version, namespace):
    """Return the tag name recording a developer upload of a version."""
    return '/'.join([namespace, 'upload', git_dep14_tag(version)])
1954 | 925 | |||
def upstream_tag(version, namespace):
    """Return the tag name for the upstream part of a version."""
    return '/'.join([namespace, 'upstream', git_dep14_tag(version)])
1957 | 928 | |||
def orphan_tag(version, namespace):
    """Return the tag name for an orphaned (parentless) version import."""
    return '/'.join([namespace, 'orphan', git_dep14_tag(version)])
1960 | 931 | |||
def is_dir_3_0_quilt(_dir=None):
    """Report whether the source tree in _dir uses the 3.0 (quilt) format.

    dpkg-source is asked first; if that fails, fall back to reading
    debian/source/format directly.  `man dpkg-source` indicates that a
    missing debian/source/format implies format 1.0, so OSError on open
    yields False.

    :param _dir: directory to inspect (defaults to the current directory)
    :rtype: bool
    """
    _dir = _dir if _dir else '.'
    try:
        fmt, _ = run(['dpkg-source', '--print-format', _dir])
        if '3.0 (quilt)' in fmt:
            return True
    except CalledProcessError:
        try:
            with open(os.path.join(_dir, 'debian/source/format'), 'r') as f:
                for line in f:
                    # Bug fix: the previous pattern r'3.0 (.*)' treated
                    # the parentheses as a regex group, so *any* "3.0"
                    # format line (e.g. "3.0 (native)") matched even
                    # though this function is specifically about quilt.
                    if re.match(r'3\.0 \(quilt\)', line):
                        return True
        except OSError:
            pass

    return False
1978 | 949 | |||
def is_3_0_quilt(repo, commitish='HEAD'):
    """Report whether the tree at commitish uses the 3.0 (quilt) format.

    :param repo: repository providing a temporary_worktree() context
    :param str commitish: commit-ish to check out and inspect
    :rtype: bool
    """
    with repo.temporary_worktree(commitish):
        result = is_dir_3_0_quilt()
    return result
1982 | 953 | |||
class GitUbuntuRepositoryFetchError(Exception):
    """Raised when fetching into a git-ubuntu repository fails."""
1985 | 956 | |||
1986 | 957 | |||
def determine_quilt_series_path(pygit2_repo, treeish_obj):
    """Find the active quilt series file path in use.

    Search the given tree for the Debian patch series file that is
    active according to the search algorithm described in
    dpkg-source(1).  When none of the candidates exists, return the
    default series path (also from dpkg-source(1)).

    :param pygit2.Repo pygit2_repo: repository to look in.
    :param pygit2.Object treeish_obj: object that peels to a pygit2.Tree.
    :returns: relative path to series file.
    :rtype: str
    """
    for candidate_name in ('debian.series', 'series'):
        candidate_path = posixpath.join('debian/patches', candidate_name)
        try:
            follow_symlinks_to_blob(
                repo=pygit2_repo,
                treeish_object=treeish_obj,
                path=candidate_path,
            )
        except KeyError:
            # not present at this path; try the next candidate
            continue
        return candidate_path

    logging.debug("Unable to find a series file in %r", treeish_obj)
    return 'debian/patches/series'
2013 | 984 | |||
2014 | 985 | |||
def quilt_env(pygit2_repo, treeish):
    """Find the appropriate quilt environment to use.

    Return the canonical environment settings for calling quilt.  The
    series file does not always have the same name, so the source tree
    is examined to set QUILT_SERIES appropriately.

    Only quilt-influencing variables are returned; no other environment
    variables are integrated.

    :param pygit2.Repo pygit2_repo: repository to look in.
    :param pygit2.Object treeish: object that peels to a pygit2.Tree.
    :returns: quilt-specific environment settings
    :rtype: dict
    """
    env = {
        'QUILT_PATCHES': 'debian/patches',
        'QUILT_NO_DIFF_INDEX': '1',
        'QUILT_NO_DIFF_TIMESTAMPS': '1',
        'EDITOR': 'true',
    }
    env['QUILT_SERIES'] = determine_quilt_series_path(pygit2_repo, treeish)
    return env
2038 | 1009 | |||
2039 | 1010 | |||
def datetime_to_signature_spec(datetime):
    """Convert a datetime to the time and offset required by a pygit2.Signature

    :param datetime datetime: the timezone-aware datetime to convert
    :rtype: tuple(int, int)
    :returns: seconds since the epoch and the timezone offset in
        minutes, suitable for the pygit2.Signature constructor's time
        and offset parameters.
    """
    # NOTE(review): the parameter shadows the datetime module; it is
    # kept unchanged because the name is part of the public interface.
    utc_offset = datetime.utcoffset()
    if utc_offset:
        # seconds -> minutes
        offset_minutes = int(utc_offset.total_seconds()) // 60
    else:
        # Naive datetimes (utcoffset() is None) and a zero offset both
        # map to 0.
        offset_minutes = 0
    return int(datetime.timestamp()), offset_minutes
2058 | 1029 | |||
2059 | 1030 | |||
class HeadInfoItem(collections.namedtuple(
    'HeadInfoItem',
    'version commit_time commit_id',
)):
    """Information associated with a single branch head

    :ivar str version: the package version found in debian/changelog at
        the branch head.
    :ivar int commit_time: the timestamp of the commit at the branch
        head, expressed as seconds since the Unix epoch.
    :ivar pygit2.Oid commit_id: the hash of the commit at the branch
        head.
    """
2077 | 1048 | |||
2078 | 1049 | |||
2079 | 1050 | class GitUbuntuRepository: | ||
2080 | 1051 | """A class for interacting with an importer git repository | ||
2081 | 1052 | |||
2082 | 1053 | This class attempts to put all objects it manipulates in an | ||
2083 | 1054 | 'importer/' namespace. It also uses tags in one of three namespaces: | ||
2084 | 1055 | 'import/' for successfully imported published versions (these are | ||
2085 | 1056 | created by the importer); 'upload/' for uploaded version by Ubuntu | ||
2086 | 1057 | developers (these are understood by the importer and are aliased by | ||
2087 | 1058 | import/ tags when succesfully imported); and 'orphan/' for published | ||
2088 | 1059 | versions for which no parents can be found (these are also created | ||
2089 | 1060 | by the importer). | ||
2090 | 1061 | |||
2091 | 1062 | To access the underlying pygit2.Repository object, use the raw_repo | ||
2092 | 1063 | property. | ||
2093 | 1064 | """ | ||
2094 | 1065 | |||
    def __init__(
        self,
        local_dir,
        lp_user=None,
        fetch_proto=None,
        delete_on_close=True,
    ):
        """Initialize (or reopen) a git repository for importer use.

        :param local_dir: path to the repository directory.  If None, a
            fresh temporary directory is used.  Otherwise the directory
            is created if missing; an existing non-empty directory is
            only accepted when it contains $GIT_DIR (default '.git'),
            i.e. looks like a previous import — otherwise the process
            exits with status 1.
        :param lp_user: Launchpad username; when falsy, the git config
            key gitubuntu.lpuser is consulted (None if unset).
        :param fetch_proto: protocol for fetches.  If None, the default
            value from gitubuntu.__main__ will be used
            (top_level_defaults.proto).
        :param delete_on_close: when True, close() removes the local
            directory.
        """
        if local_dir is None:
            self._local_dir = tempfile.mkdtemp()
        else:
            local_dir = os.path.abspath(local_dir)
            try:
                os.mkdir(local_dir)
            except FileExistsError:
                # Refuse to use a non-empty directory unless it already
                # looks like a git checkout (contains $GIT_DIR).
                local_dir_list = os.listdir(local_dir)
                if local_dir_list and os.getenv(
                    'GIT_DIR',
                    '.git',
                ) not in local_dir_list:
                    logging.error('Specified directory %s must either '
                                  'be empty or have been previously '
                                  'imported to.', local_dir)
                    sys.exit(1)
            self._local_dir = local_dir

        # init_repository is idempotent for an existing repository.
        self.raw_repo = pygit2.init_repository(self._local_dir)
        # We rely on raw_repo.workdir to be identical to self._local_dir to
        # avoid changing previous behaviour in the setting of GIT_WORK_TREE, so
        # assert that it is so. This may not be the case if the git repository
        # has a different workdir stored in its configuration or if the git
        # repository is a bare repository. We didn't handle these cases before
        # anyway, so with this assertion we can fail noisily and early.
        assert (
            os.path.normpath(self.raw_repo.workdir) ==
            os.path.normpath(self._local_dir)
        )

        # Since previous behaviour of this class depended on the state of the
        # environment at the time it was constructed, save this for later use
        # (for example in deriving the environment to use for calls to the git
        # CLI). This permits the behaviour to remain identical for now.
        # Eventually we can break previous behaviour and eliminate the need for
        # this. See also: gitubuntu.test_fixtures.repo; the handling of EMAIL
        # there could be made cleaner when this is cleaned up.
        self._initial_env = os.environ.copy()

        self.set_git_attributes()

        if lp_user:
            self._lp_user = lp_user
        else:
            # Fall back to the user's git configuration; an unset key is
            # not an error and leaves the user undetermined.
            try:
                self._lp_user, _ = self.git_run(
                    ['config', 'gitubuntu.lpuser'],
                    verbose_on_failure=False,
                )
                self._lp_user = self._lp_user.strip()
            except CalledProcessError:
                self._lp_user = None

        if fetch_proto is None:
            fetch_proto = top_level_defaults.proto

        self._fetch_proto = fetch_proto
        self._delete_on_close = delete_on_close
2164 | 1135 | |||
2165 | 1136 | def close(self): | ||
2166 | 1137 | """Free resources associated with this instance | ||
2167 | 1138 | |||
2168 | 1139 | If delete_on_close was True on instance construction, local_dir (as | ||
2169 | 1140 | specified on instance construction) will be deleted. | ||
2170 | 1141 | |||
2171 | 1142 | After this method is called, the instance is invalid and can no longer | ||
2172 | 1143 | be used. | ||
2173 | 1144 | """ | ||
2174 | 1145 | if self.raw_repo and self._delete_on_close: | ||
2175 | 1146 | shutil.rmtree(self.local_dir) | ||
2176 | 1147 | self.raw_repo = None | ||
2177 | 1148 | |||
2178 | 1149 | def create_orphan_branch(self, branch_name, msg): | ||
2179 | 1150 | if self.get_head_by_name(branch_name) is None: | ||
2180 | 1151 | self.git_run(['checkout', '--orphan', branch_name]) | ||
2181 | 1152 | self.git_run(['commit', '--allow-empty', '-m', msg]) | ||
2182 | 1153 | self.git_run(['checkout', '--orphan', 'master']) | ||
2183 | 1154 | |||
    @contextmanager
    def pristine_tar_branches(self, dist, namespace='pkg', create=True):
        """Context manager wrapping pristine-tar branch manipulation

        In this context, the repository pristine-tar branch will point to
        the pristine-tar branch for @dist distribution in @namespace.

        Because of our model, the distribution-pristine-tar branch may
        be a local branch (import-time) or a remote-tracking branch
        (build-time) and we need different behavior in both cases.
        Specifically, we want to affect the local branch's contents, but
        we cannot do that to a remote-tracking branch.

        Upon entry to the context, detect the former case (by doing a
        local only lookup first) and doing a branch rename there.
        Otherwise, create a new local branch.

        Upon exit, if a local branch had been found, rename pristine-tar
        back to the original name. Otherwise, simply delete the created
        pristine-tar branch.

        If a local branch named pristine-tar existed outside this
        context, it will be restored upon leaving the context.

        :param dist str One of 'ubuntu' or 'debian'
        :param namespace str Namespace under which Git refs are found
        :param create bool If an appropriate local pristine-tar Git
            branch does not exist, create one using the above algorithm.
        """
        pt_branch = '%s/importer/%s/pristine-tar' % (namespace, dist)
        # Save any pre-existing local 'pristine-tar' branch so that it
        # can be restored when the context exits.
        old_pt_branch = self.raw_repo.lookup_branch('pristine-tar')
        old_pt_branch_commit = None
        if old_pt_branch:
            old_pt_branch_commit = old_pt_branch.peel(pygit2.Commit)
            old_pt_branch.delete()
        local_pt_branch = self.raw_repo.lookup_branch(pt_branch)
        remote_pt_branch = self.raw_repo.lookup_branch(
            pt_branch,
            pygit2.GIT_BRANCH_REMOTE,
        )
        if local_pt_branch:
            # Import-time case: temporarily rename the local branch into
            # place; it is renamed back on exit.
            local_pt_branch.rename('pristine-tar')
        elif remote_pt_branch:
            # Build-time case: materialize a local branch from the
            # remote-tracking one; it is deleted again on exit.
            self.raw_repo.create_branch(
                'pristine-tar',
                remote_pt_branch.peel(pygit2.Commit),
            )
        elif create:
            # This should only be possible when importing and the first
            # pristine-tar usage, create an orphan branch at the local
            # pt branch location and flag it for cleanup
            # (local_pt_branch = True reuses the local-branch exit path).
            local_pt_branch = True
            self.create_orphan_branch(
                'pristine-tar',
                'Initial %s pristine-tar branch.' % dist,
            )
            if not self.raw_repo.lookup_branch('do-not-push'):
                self.create_orphan_branch(
                    'do-not-push',
                    'Initial upstream branch.',
                )
        try:
            yield
        except:
            # NOTE(review): this bare re-raise is a no-op; cleanup below
            # is driven entirely by the finally clause.
            raise
        finally:
            if local_pt_branch:  # or create above
                self.raw_repo.lookup_branch('pristine-tar').rename(pt_branch)
            elif remote_pt_branch:
                self.raw_repo.lookup_branch('pristine-tar').delete()
            if old_pt_branch_commit:
                # Restore the caller's original pristine-tar branch.
                self.raw_repo.create_branch(
                    'pristine-tar',
                    old_pt_branch_commit,
                )
2259 | 1230 | |||
2260 | 1231 | def pristine_tar_list(self, dist, namespace='pkg'): | ||
2261 | 1232 | """List tarballs stored in pristine-tar branch for @dist distribution in @namespace. | ||
2262 | 1233 | |||
2263 | 1234 | If there is no pristine-tar branch, `pristine-tar list` returns | ||
2264 | 1235 | nothing. | ||
2265 | 1236 | |||
2266 | 1237 | :param dist str One of 'ubuntu' or 'debian' | ||
2267 | 1238 | :param namespace str Namespace under which Git refs are found | ||
2268 | 1239 | :rtype list(str) | ||
2269 | 1240 | :returns List of orig tarball names stored in the pristine-tar | ||
2270 | 1241 | branches | ||
2271 | 1242 | """ | ||
2272 | 1243 | with self.pristine_tar_branches(dist, namespace, create=False): | ||
2273 | 1244 | stdout, _ = run(['pristine-tar', 'list']) | ||
2274 | 1245 | return stdout.strip().splitlines() | ||
2275 | 1246 | |||
    def pristine_tar_extract(self, pkgname, version, dist=None, namespace='pkg'):
        '''Extract orig tarballs for a given package and upstream version

        This function will fail if the expected tarballs are already
        present by name in the parent directory. If, at some point, this
        is not desired, we would need to pass --git-force-create to
        gbp-buildpackage.

        The files, once created, are the responsibility of the caller to
        remove, if necessary.

        raises:
          - PristineTarNotFoundError if no suitable tarballs are found
          - MultiplePristineTarFoundError if multiple distinct suitable tarballs
            are found
          - CalledProcessError if gbp-buildpackage fails

        :param pkgname str Source package name
        :param version str Source package upstream version
        :param dist str One of 'ubuntu' or 'debian'
        :param namespace str Namespace under which Git refs are found
        :rtype list(str)
        :returns List of tarball paths that are now present on the
            filesystem. They will be in the parent directory.
        '''
        # If no dist was given, try Debian first, then Ubuntu; the first
        # distribution with a matching main tarball wins.
        dists = [dist] if dist else ['debian', 'ubuntu']
        for dist in dists:
            main_tarball = '%s_%s.orig.tar' % (pkgname, version)

            all_tarballs = self.pristine_tar_list(dist, namespace)

            # startswith() rather than equality: the stored name carries a
            # compression extension (e.g. .gz, .xz) after '.orig.tar'.
            potential_main_tarballs = [tarball for tarball
                in all_tarballs if tarball.startswith(main_tarball)]
            if len(potential_main_tarballs) == 0:
                continue
            if len(potential_main_tarballs) > 1:
                # This will need some extension/flag for the case of there
                # being multiple imports with varying compression
                raise MultiplePristineTarFoundError(
                    'More than one pristine-tar tarball found for %s: %s' %
                    (version, potential_main_tarballs)
                )
            # The compression extension of the single stored main tarball.
            ext = os.path.splitext(potential_main_tarballs[0])[1]
            tarballs = []
            # Tarballs are materialized in the parent directory of the
            # working tree, matching dpkg-buildpackage's expectations.
            tarballs.append(
                os.path.join(os.path.pardir, potential_main_tarballs[0])
            )
            args = ['buildpackage', '--git-builder=/bin/true',
                '--git-pristine-tar', '--git-ignore-branch',
                '--git-upstream-tag=%s/upstream/%s/%%(version)s%s' %
                (namespace, dist, ext)]
            # This will probably break if the component tarballs get
            # compressed differently, as each component tarball will show up
            # multiple times
            # Breaks may be too strong -- we will 'over cache' tarballs, and
            # then it's up to dpkg-buildpackage to use the 'correct' one
            potential_component_tarballs = {
                component_tarball_matches(tarball, pkgname, version).group('component') : tarball
                for tarball in all_tarballs
                if component_tarball_matches(tarball, pkgname, version)
            }
            tarballs.extend(map(lambda x : os.path.join(os.path.pardir, x),
                list(potential_component_tarballs.values()))
            )
            args.extend(map(lambda x : '--git-component=%s' % x,
                list(potential_component_tarballs.keys()))
            )
            # Run gbp with the pristine-tar branches temporarily in place;
            # the context manager restores the previous branch state.
            with self.pristine_tar_branches(dist, namespace):
                run_gbp(args, env=self.env)
            return tarballs

        raise PristineTarNotFoundError(
            'No pristine-tar tarball found for %s' % version
        )
2350 | 1321 | |||
2351 | 1322 | def pristine_tar_exists(self, pkgname, version, namespace='pkg'): | ||
2352 | 1323 | '''Report distributions that contain pristine-tar data for @version | ||
2353 | 1324 | |||
2354 | 1325 | raises: | ||
2355 | 1326 | - MultiplePristineTarFoundError if multiple distinct suitable tarballs | ||
2356 | 1327 | are found | ||
2357 | 1328 | |||
2358 | 1329 | :param pkgname str Source package name | ||
2359 | 1330 | :param version str Source package upstream version | ||
2360 | 1331 | :param namespace str Namespace under which Git refs are found | ||
2361 | 1332 | :rtype list(str) | ||
2362 | 1333 | :returns List of distribution names which contain a pristine-tar | ||
2363 | 1334 | import for @pkgname and @version | ||
2364 | 1335 | ''' | ||
2365 | 1336 | results = [] | ||
2366 | 1337 | for dist in ['debian', 'ubuntu']: | ||
2367 | 1338 | main_tarball = '%s_%s.orig.tar' % (pkgname, version) | ||
2368 | 1339 | |||
2369 | 1340 | all_tarballs = self.pristine_tar_list(dist, namespace) | ||
2370 | 1341 | |||
2371 | 1342 | potential_main_tarballs = [tarball for tarball | ||
2372 | 1343 | in all_tarballs if tarball.startswith(main_tarball)] | ||
2373 | 1344 | if len(potential_main_tarballs) == 0: | ||
2374 | 1345 | continue | ||
2375 | 1346 | if len(potential_main_tarballs) > 1: | ||
2376 | 1347 | # This will need some extension/flag for the case of there | ||
2377 | 1348 | # being multiple imports with varying compression | ||
2378 | 1349 | raise MultiplePristineTarFoundError( | ||
2379 | 1350 | 'More than one pristine-tar tarball found for %s: %s' % | ||
2380 | 1351 | (version, potential_main_tarballs) | ||
2381 | 1352 | ) | ||
2382 | 1353 | results.append(dist) | ||
2383 | 1354 | |||
2384 | 1355 | return results | ||
2385 | 1356 | |||
2386 | 1357 | def verify_pristine_tar(self, tarball_paths, dist, namespace='pkg'): | ||
2387 | 1358 | '''Verify the pristine-tar data matches for a set of paths | ||
2388 | 1359 | |||
2389 | 1360 | raises: | ||
2390 | 1361 | PristineTarError - if a tarball has been imported before, | ||
2391 | 1362 | but the contents of the new tarball do not match | ||
2392 | 1363 | |||
2393 | 1364 | :param tarball_paths list(str) List of filesystem paths of orig | ||
2394 | 1365 | tarballs to verify | ||
2395 | 1366 | :param dist str One of 'ubuntu' or 'debian' | ||
2396 | 1367 | :param namespace str Namespace under which Git refs are found | ||
2397 | 1368 | :rtype bool | ||
2398 | 1369 | :returns True if all paths in @tarball_paths exist in @dist's | ||
2399 | 1370 | pristine-tar branch under @namespace and match the | ||
2400 | 1371 | corresponding pristine-tar contents exactly | ||
2401 | 1372 | ''' | ||
2402 | 1373 | all_tarballs = self.pristine_tar_list(dist, namespace) | ||
2403 | 1374 | for path in tarball_paths: | ||
2404 | 1375 | if os.path.basename(path) not in all_tarballs: | ||
2405 | 1376 | break | ||
2406 | 1377 | try: | ||
2407 | 1378 | with self.pristine_tar_branches(dist, namespace): | ||
2408 | 1379 | # need to handle this not existing | ||
2409 | 1380 | run(['pristine-tar', 'verify', path]) | ||
2410 | 1381 | except CalledProcessError as e: | ||
2411 | 1382 | raise PristineTarError( | ||
2412 | 1383 | 'Tarball has already been imported to %s with ' | ||
2413 | 1384 | 'different contents' % dist | ||
2414 | 1385 | ) | ||
2415 | 1386 | else: | ||
2416 | 1387 | return True | ||
2417 | 1388 | |||
2418 | 1389 | return False | ||
2419 | 1390 | |||
2420 | 1391 | def set_git_attributes(self): | ||
2421 | 1392 | git_attr_path = os.path.join(self.raw_repo.path, | ||
2422 | 1393 | 'info', | ||
2423 | 1394 | 'attributes' | ||
2424 | 1395 | ) | ||
2425 | 1396 | try: | ||
2426 | 1397 | # common-case: create an attributes file | ||
2427 | 1398 | with open(git_attr_path, 'x') as f: | ||
2428 | 1399 | f.write('* -ident\n') | ||
2429 | 1400 | f.write('* -text\n') | ||
2430 | 1401 | f.write('* -eol\n') | ||
2431 | 1402 | except FileExistsError: | ||
2432 | 1403 | # next-most common-case: attributes file already exists and | ||
2433 | 1404 | # contains our desired value | ||
2434 | 1405 | try: | ||
2435 | 1406 | runq(['grep', '-q', '* -ident', git_attr_path]) | ||
2436 | 1407 | except CalledProcessError: | ||
2437 | 1408 | # least-common case: attributes file exists, but does | ||
2438 | 1409 | # not contain our desired value | ||
2439 | 1410 | try: | ||
2440 | 1411 | with open(git_attr_path, 'a') as f: | ||
2441 | 1412 | f.write('* -ident\n') | ||
2442 | 1413 | except: | ||
2443 | 1414 | # failed all three cases to set our desired value in | ||
2444 | 1415 | # attributes file | ||
2445 | 1416 | logging.exception('Unable to set \'* -ident\' in %s' % | ||
2446 | 1417 | git_attr_path | ||
2447 | 1418 | ) | ||
2448 | 1419 | sys.exit(1) | ||
2449 | 1420 | try: | ||
2450 | 1421 | runq(['grep', '-q', '* -text', git_attr_path]) | ||
2451 | 1422 | except CalledProcessError: | ||
2452 | 1423 | # least-common case: attributes file exists, but does | ||
2453 | 1424 | # not contain our desired value | ||
2454 | 1425 | try: | ||
2455 | 1426 | with open(git_attr_path, 'a') as f: | ||
2456 | 1427 | f.write('* -text\n') | ||
2457 | 1428 | except: | ||
2458 | 1429 | # failed all three cases to set our desired value in | ||
2459 | 1430 | # attributes file | ||
2460 | 1431 | logging.exception('Unable to set \'* -text\' in %s' % | ||
2461 | 1432 | git_attr_path | ||
2462 | 1433 | ) | ||
2463 | 1434 | sys.exit(1) | ||
2464 | 1435 | try: | ||
2465 | 1436 | runq(['grep', '-q', '* -eol', git_attr_path]) | ||
2466 | 1437 | except CalledProcessError: | ||
2467 | 1438 | # least-common case: attributes file exists, but does | ||
2468 | 1439 | # not contain our desired value | ||
2469 | 1440 | try: | ||
2470 | 1441 | with open(git_attr_path, 'a') as f: | ||
2471 | 1442 | f.write('* -eol\n') | ||
2472 | 1443 | except: | ||
2473 | 1444 | # failed all three cases to set our desired value in | ||
2474 | 1445 | # attributes file | ||
2475 | 1446 | logging.exception('Unable to set \'* -eol\' in %s' % | ||
2476 | 1447 | git_attr_path | ||
2477 | 1448 | ) | ||
2478 | 1449 | sys.exit(1) | ||
2479 | 1450 | |||
2480 | 1451 | def remote_exists(self, remote_name): | ||
2481 | 1452 | # https://github.com/libgit2/pygit2/issues/671 | ||
2482 | 1453 | return any(remote.name == remote_name for remote in self.raw_repo.remotes) | ||
2483 | 1454 | |||
2484 | 1455 | def _add_remote_by_fetch_url( | ||
2485 | 1456 | self, | ||
2486 | 1457 | remote_name, | ||
2487 | 1458 | fetch_url, | ||
2488 | 1459 | push_url=None, | ||
2489 | 1460 | changelog_notes=False, | ||
2490 | 1461 | ): | ||
2491 | 1462 | """Add a remote by URL | ||
2492 | 1463 | |||
2493 | 1464 | If a remote with the given name doesn't exist, then create it. | ||
2494 | 1465 | Otherwise, do nothing. | ||
2495 | 1466 | |||
2496 | 1467 | :param str remote_name: the name of the remote to create | ||
2497 | 1468 | :param str fetch_url: the fetch URL for the remote | ||
2498 | 1469 | :param str push_url: the push URL for the remote. If None, then a | ||
2499 | 1470 | specific push URL will not be set. | ||
2500 | 1471 | :param bool changelog_notes: if True, then a fetch refspec will be | ||
2501 | 1472 | added to fetch changelog notes. This only makes sense for an | ||
2502 | 1473 | official importer remote such as 'pkg'. | ||
2503 | 1474 | :returns: None | ||
2504 | 1475 | """ | ||
2505 | 1476 | if not self._fetch_proto: | ||
2506 | 1477 | raise Exception('Cannot fetch using an object without a protocol') | ||
2507 | 1478 | |||
2508 | 1479 | logging.debug('Adding %s as remote %s', fetch_url, remote_name) | ||
2509 | 1480 | |||
2510 | 1481 | if not self.remote_exists(remote_name): | ||
2511 | 1482 | self.raw_repo.remotes.create( | ||
2512 | 1483 | remote_name, | ||
2513 | 1484 | fetch_url, | ||
2514 | 1485 | '+refs/heads/*:refs/remotes/%s/*' % remote_name, | ||
2515 | 1486 | ) | ||
2516 | 1487 | # grab unreachable tags (orphans) | ||
2517 | 1488 | self.raw_repo.remotes.add_fetch( | ||
2518 | 1489 | remote_name, | ||
2519 | 1490 | '+refs/tags/*:refs/tags/%s/*' % remote_name, | ||
2520 | 1491 | ) | ||
2521 | 1492 | if changelog_notes: | ||
2522 | 1493 | # The changelog notes are kept at refs/notes/commits on | ||
2523 | 1494 | # Launchpad due to LP: #1871838 even though our standard place | ||
2524 | 1495 | # for them is refs/notes/changelog. | ||
2525 | 1496 | self.raw_repo.remotes.add_fetch( | ||
2526 | 1497 | remote_name, | ||
2527 | 1498 | '+refs/notes/commits:refs/notes/changelog', | ||
2528 | 1499 | ) | ||
2529 | 1500 | if push_url: | ||
2530 | 1501 | self.raw_repo.remotes.set_push_url( | ||
2531 | 1502 | remote_name, | ||
2532 | 1503 | push_url, | ||
2533 | 1504 | ) | ||
2534 | 1505 | self.git_run( | ||
2535 | 1506 | [ | ||
2536 | 1507 | 'config', | ||
2537 | 1508 | 'remote.%s.tagOpt' % remote_name, | ||
2538 | 1509 | '--no-tags', | ||
2539 | 1510 | ] | ||
2540 | 1511 | ) | ||
2541 | 1512 | |||
2542 | 1513 | def _add_remote(self, remote_name, remote_url, changelog_notes=False): | ||
2543 | 1514 | """Add a remote by URL location | ||
2544 | 1515 | |||
2545 | 1516 | URL location means the part of the URL after the proto:// prefix. The | ||
2546 | 1517 | protocol to be used will be determined by what was specified by the | ||
2547 | 1518 | fetch_proto at class instance construction time. Separate fetch and | ||
2548 | 1519 | push URL protocols will be automatically determined. | ||
2549 | 1520 | |||
2550 | 1521 | If a remote with the given name doesn't exist, then create it. | ||
2551 | 1522 | Otherwise, do nothing. | ||
2552 | 1523 | |||
2553 | 1524 | :param str remote_name: the name of the remote to create | ||
2554 | 1525 | :param str remote_url: the URL for the remote but with the proto:// | ||
2555 | 1526 | prefix missing. | ||
2556 | 1527 | :param bool changelog_notes: if True, then a fetch refspec will be | ||
2557 | 1528 | added to fetch changelog notes. This only makes sense for an | ||
2558 | 1529 | official importer remote such as 'pkg'. | ||
2559 | 1530 | :returns: None | ||
2560 | 1531 | """ | ||
2561 | 1532 | if not self._fetch_proto: | ||
2562 | 1533 | raise Exception('Cannot fetch using an object without a protocol') | ||
2563 | 1534 | if not self._lp_user: | ||
2564 | 1535 | raise RuntimeError("Cannot add remote without knowing lp_user") | ||
2565 | 1536 | fetch_url = '%s://%s' % (self._fetch_proto, remote_url) | ||
2566 | 1537 | push_url = 'ssh://%s@%s' % (self.lp_user, remote_url) | ||
2567 | 1538 | |||
2568 | 1539 | self._add_remote_by_fetch_url( | ||
2569 | 1540 | remote_name=remote_name, | ||
2570 | 1541 | fetch_url=fetch_url, | ||
2571 | 1542 | push_url=push_url, | ||
2572 | 1543 | changelog_notes=changelog_notes, | ||
2573 | 1544 | ) | ||
2574 | 1545 | |||
2575 | 1546 | def add_remote( | ||
2576 | 1547 | self, | ||
2577 | 1548 | pkgname, | ||
2578 | 1549 | repo_owner, | ||
2579 | 1550 | remote_name, | ||
2580 | 1551 | changelog_notes=False, | ||
2581 | 1552 | ): | ||
2582 | 1553 | """Add a remote to the repository configuration | ||
2583 | 1554 | :param str pkgname: the name of the source package reflected by this | ||
2584 | 1555 | repository. | ||
2585 | 1556 | :param str repo_owner: the name of the Launchpad user or team whose | ||
2586 | 1557 | repository for the package will be pointed to by this new remote. | ||
2587 | 1558 | If None, the default repository for the source package will be | ||
2588 | 1559 | used. | ||
2589 | 1560 | :param str remote_name: the name of the remote to add. | ||
2590 | 1561 | :param bool changelog_notes: if True, then a fetch refspec will be | ||
2591 | 1562 | added to fetch changelog notes. This only makes sense for an | ||
2592 | 1563 | official importer remote such as 'pkg'. | ||
2593 | 1564 | :returns: None | ||
2594 | 1565 | """ | ||
2595 | 1566 | if not self._fetch_proto: | ||
2596 | 1567 | raise Exception('Cannot fetch using an object without a protocol') | ||
2597 | 1568 | if repo_owner: | ||
2598 | 1569 | remote_url = ('git.launchpad.net/~%s/ubuntu/+source/%s' % | ||
2599 | 1570 | (repo_owner, pkgname)) | ||
2600 | 1571 | else: | ||
2601 | 1572 | remote_url = ('git.launchpad.net/ubuntu/+source/%s' % pkgname) | ||
2602 | 1573 | |||
2603 | 1574 | self._add_remote( | ||
2604 | 1575 | remote_name=remote_name, | ||
2605 | 1576 | remote_url=remote_url, | ||
2606 | 1577 | changelog_notes=changelog_notes, | ||
2607 | 1578 | ) | ||
2608 | 1579 | |||
2609 | 1580 | def add_remote_by_url(self, remote_name, fetch_url): | ||
2610 | 1581 | if not self._fetch_proto: | ||
2611 | 1582 | raise Exception('Cannot fetch using an object without a protocol') | ||
2612 | 1583 | |||
2613 | 1584 | self._add_remote_by_fetch_url(remote_name, fetch_url) | ||
2614 | 1585 | |||
2615 | 1586 | def add_base_remotes(self, pkgname, repo_owner=None): | ||
2616 | 1587 | """Add the 'pkg' base remote to the repository configuration | ||
2617 | 1588 | |||
2618 | 1589 | :param str pkgname: the name of the source package reflected by this | ||
2619 | 1590 | repository. | ||
2620 | 1591 | :param str repo_owner: the name of the Launchpad user or team whose | ||
2621 | 1592 | repository for the package will be pointed to by this new remote. | ||
2622 | 1593 | If None, the default repository for the source package will be | ||
2623 | 1594 | used. | ||
2624 | 1595 | :returns: None | ||
2625 | 1596 | """ | ||
2626 | 1597 | self.add_remote(pkgname, repo_owner, 'pkg', changelog_notes=True) | ||
2627 | 1598 | |||
2628 | 1599 | def add_lpuser_remote(self, pkgname): | ||
2629 | 1600 | if not self._fetch_proto: | ||
2630 | 1601 | raise Exception('Cannot add a fetch using an object without a protocol') | ||
2631 | 1602 | if not self._lp_user: | ||
2632 | 1603 | raise RuntimeError("Cannot add remote without knowing lp_user") | ||
2633 | 1604 | remote_url = ('git.launchpad.net/~%s/ubuntu/+source/%s' % | ||
2634 | 1605 | (self.lp_user, pkgname)) | ||
2635 | 1606 | |||
2636 | 1607 | self._add_remote(remote_name=self.lp_user, remote_url=remote_url) | ||
2637 | 1608 | # XXX: want a remote alias of 'lpme' -> self.lp_user | ||
2638 | 1609 | # self.git_run(['config', 'url.%s.insteadof' % self.lp_user, 'lpme']) | ||
2639 | 1610 | |||
2640 | 1611 | def fetch_remote(self, remote_name, verbose=False): | ||
2641 | 1612 | # Does not seem to be working with https | ||
2642 | 1613 | # https://github.com/libgit2/pygit2/issues/573 | ||
2643 | 1614 | # https://github.com/libgit2/libgit2/issues/3786 | ||
2644 | 1615 | # self.raw_repo.remotes[remote_name].fetch() | ||
2645 | 1616 | kwargs = {} | ||
2646 | 1617 | kwargs['verbose_on_failure'] = True | ||
2647 | 1618 | if verbose: | ||
2648 | 1619 | # If we are redirecting stdout/stderr to the console, we | ||
2649 | 1620 | # do not need to have run() also emit it | ||
2650 | 1621 | kwargs['verbose_on_failure'] = False | ||
2651 | 1622 | kwargs['stdout'] = None | ||
2652 | 1623 | kwargs['stderr'] = None | ||
2653 | 1624 | try: | ||
2654 | 1625 | logging.debug("Fetching remote %s", remote_name) | ||
2655 | 1626 | self.git_run( | ||
2656 | 1627 | args=['fetch', remote_name], | ||
2657 | 1628 | env={'GIT_TERMINAL_PROMPT': '0',}, | ||
2658 | 1629 | **kwargs | ||
2659 | 1630 | ) | ||
2660 | 1631 | except CalledProcessError: | ||
2661 | 1632 | raise GitUbuntuRepositoryFetchError( | ||
2662 | 1633 | "Unable to fetch remote %s" % remote_name | ||
2663 | 1634 | ) | ||
2664 | 1635 | |||
2665 | 1636 | def fetch_base_remotes(self, verbose=False): | ||
2666 | 1637 | self.fetch_remote(remote_name='pkg', verbose=verbose) | ||
2667 | 1638 | |||
2668 | 1639 | def fetch_remote_refspecs(self, remote_name, refspecs, verbose=False): | ||
2669 | 1640 | # Does not seem to be working with https | ||
2670 | 1641 | # https://github.com/libgit2/pygit2/issues/573 | ||
2671 | 1642 | # https://github.com/libgit2/libgit2/issues/3786 | ||
2672 | 1643 | # self.raw_repo.remotes[remote_name].fetch() | ||
2673 | 1644 | for refspec in refspecs: | ||
2674 | 1645 | kwargs = {} | ||
2675 | 1646 | kwargs['verbose_on_failure'] = True | ||
2676 | 1647 | if verbose: | ||
2677 | 1648 | # If we are redirecting stdout/stderr to the console, we | ||
2678 | 1649 | # do not need to have run() also emit it | ||
2679 | 1650 | kwargs['verbose_on_failure'] = False | ||
2680 | 1651 | kwargs['stdout'] = None | ||
2681 | 1652 | kwargs['stderr'] = None | ||
2682 | 1653 | try: | ||
2683 | 1654 | logging.debug( | ||
2684 | 1655 | "Fetching refspec %s from remote %s", | ||
2685 | 1656 | refspec, | ||
2686 | 1657 | remote_name, | ||
2687 | 1658 | ) | ||
2688 | 1659 | self.git_run( | ||
2689 | 1660 | args=['fetch', remote_name, refspec], | ||
2690 | 1661 | env={'GIT_TERMINAL_PROMPT': '0',}, | ||
2691 | 1662 | **kwargs, | ||
2692 | 1663 | ) | ||
2693 | 1664 | except CalledProcessError: | ||
2694 | 1665 | raise GitUbuntuRepositoryFetchError( | ||
2695 | 1666 | "Unable to fetch %s from remote %s" % ( | ||
2696 | 1667 | refspecs, | ||
2697 | 1668 | remote_name, | ||
2698 | 1669 | ) | ||
2699 | 1670 | ) | ||
2700 | 1671 | |||
2701 | 1672 | def fetch_lpuser_remote(self, verbose=False): | ||
2702 | 1673 | if not self._fetch_proto: | ||
2703 | 1674 | raise Exception('Cannot fetch using an object without a protocol') | ||
2704 | 1675 | if not self._lp_user: | ||
2705 | 1676 | raise RuntimeError("Cannot fetch without knowing lp_user") | ||
2706 | 1677 | self.fetch_remote(remote_name=self.lp_user, verbose=verbose) | ||
2707 | 1678 | |||
2708 | 1679 | def copy_base_references(self, namespace): | ||
2709 | 1680 | for ref in self.references: | ||
2710 | 1681 | for (target_refs, source_refs) in [ | ||
2711 | 1682 | ('refs/heads/%s/' % namespace, 'refs/remotes/pkg/'),]: | ||
2712 | 1683 | if ref.name.startswith(source_refs): | ||
2713 | 1684 | self.raw_repo.create_reference( | ||
2714 | 1685 | '%s/%s' % (target_refs, ref.name[len(source_refs):]), | ||
2715 | 1686 | ref.peel().id) | ||
2716 | 1687 | |||
2717 | 1688 | def delete_branches_in_namespace(self, namespace): | ||
2718 | 1689 | _local_branches = copy(self.local_branches) | ||
2719 | 1690 | for head in self.local_branches: | ||
2720 | 1691 | if head.branch_name.startswith(namespace): | ||
2721 | 1692 | head.delete() | ||
2722 | 1693 | |||
2723 | 1694 | def delete_tags_in_namespace(self, namespace): | ||
2724 | 1695 | _tags = copy(self.tags) | ||
2725 | 1696 | for ref in self.tags: | ||
2726 | 1697 | if ref.name.startswith('refs/tags/%s' % namespace): | ||
2727 | 1698 | ref.delete() | ||
2728 | 1699 | |||
2729 | 1700 | @property | ||
2730 | 1701 | def env(self): | ||
2731 | 1702 | # Return a copy of the cached _derive_env method result so that the | ||
2732 | 1703 | # caller cannot inadvertently modify our cached answer. Unfortunately | ||
2733 | 1704 | # this leaks the lru_cache-ness of the _derive_env method to this | ||
2734 | 1705 | # property getter, but this seems better than nothing. | ||
2735 | 1706 | return dict(self._derive_env()) | ||
2736 | 1707 | |||
    @lru_cache()
    def _derive_env(self):
        """Determine what the git CLI environment should be

        This depends on the initial environment saved from the constructor and
        the paths associated with self.raw_repo, neither of which should change
        in the lifetime of this class instance.
        """
        # NOTE(review): lru_cache on an instance method keys the cache on
        # self and keeps every instance alive for the cache's lifetime;
        # presumably acceptable because repository objects are few and
        # long-lived — confirm before reusing this pattern elsewhere.
        return _derive_git_cli_env(
            self.raw_repo,
            initial_env=self._initial_env
        )
2749 | 1720 | |||
2750 | 1721 | @property | ||
2751 | 1722 | def local_dir(self): | ||
2752 | 1723 | """Base directory of this git repository (contains .git/)""" | ||
2753 | 1724 | return self._local_dir | ||
2754 | 1725 | |||
2755 | 1726 | @property | ||
2756 | 1727 | def git_dir(self): | ||
2757 | 1728 | """Same as cached object in the environment""" | ||
2758 | 1729 | return self.raw_repo.path | ||
2759 | 1730 | |||
2760 | 1731 | def _references(self, prefix=''): | ||
2761 | 1732 | return [self.raw_repo.lookup_reference(r) for r in | ||
2762 | 1733 | self.raw_repo.listall_references() if | ||
2763 | 1734 | r.startswith(prefix)] | ||
2764 | 1735 | |||
2765 | 1736 | def references_with_prefix(self, prefix): | ||
2766 | 1737 | return self._references(prefix) | ||
2767 | 1738 | |||
2768 | 1739 | @property | ||
2769 | 1740 | def references(self): | ||
2770 | 1741 | return self._references() | ||
2771 | 1742 | |||
2772 | 1743 | @property | ||
2773 | 1744 | def tags(self): | ||
2774 | 1745 | return self._references('refs/tags') | ||
2775 | 1746 | |||
2776 | 1747 | def _branches(self, | ||
2777 | 1748 | branch_type=pygit2.GIT_BRANCH_LOCAL | pygit2.GIT_BRANCH_REMOTE): | ||
2778 | 1749 | branches = [] | ||
2779 | 1750 | if branch_type & pygit2.GIT_BRANCH_LOCAL: | ||
2780 | 1751 | branches.extend([self.raw_repo.lookup_branch(b) for b in | ||
2781 | 1752 | self.raw_repo.listall_branches(pygit2.GIT_BRANCH_LOCAL)]) | ||
2782 | 1753 | if branch_type & pygit2.GIT_BRANCH_REMOTE: | ||
2783 | 1754 | branches.extend([self.raw_repo.lookup_branch(b, pygit2.GIT_BRANCH_REMOTE) for b in | ||
2784 | 1755 | self.raw_repo.listall_branches(pygit2.GIT_BRANCH_REMOTE)]) | ||
2785 | 1756 | return branches | ||
2786 | 1757 | |||
2787 | 1758 | @property | ||
2788 | 1759 | def branches(self): | ||
2789 | 1760 | return self._branches() | ||
2790 | 1761 | |||
2791 | 1762 | @property | ||
2792 | 1763 | def branch_names(self): | ||
2793 | 1764 | return [b.branch_name for b in self.branches] | ||
2794 | 1765 | |||
2795 | 1766 | @property | ||
2796 | 1767 | def local_branches(self): | ||
2797 | 1768 | return self._branches(pygit2.GIT_BRANCH_LOCAL) | ||
2798 | 1769 | |||
2799 | 1770 | @property | ||
2800 | 1771 | def local_branch_names(self): | ||
2801 | 1772 | return [b.branch_name for b in self.local_branches] | ||
2802 | 1773 | |||
2803 | 1774 | @property | ||
2804 | 1775 | def remote_branches(self): | ||
2805 | 1776 | return self._branches(pygit2.GIT_BRANCH_REMOTE) | ||
2806 | 1777 | |||
2807 | 1778 | @property | ||
2808 | 1779 | def remote_branch_names(self): | ||
2809 | 1780 | return [b.branch_name for b in self.remote_branches] | ||
2810 | 1781 | |||
2811 | 1782 | @property | ||
2812 | 1783 | def lp_user(self): | ||
2813 | 1784 | if not self._lp_user: | ||
2814 | 1785 | raise RuntimeError("lp_user is not set") | ||
2815 | 1786 | return self._lp_user | ||
2816 | 1787 | |||
2817 | 1788 | def get_commitish(self, commitish): | ||
2818 | 1789 | return self.raw_repo.revparse_single(commitish) | ||
2819 | 1790 | |||
2820 | 1791 | def head_to_commit(self, head_name): | ||
2821 | 1792 | return str(self.get_head_by_name(head_name).peel().id) | ||
2822 | 1793 | |||
2823 | 1794 | def get_short_hash(self, hash): | ||
2824 | 1795 | """Return an unambiguous but abbreviated form of a commit hash | ||
2825 | 1796 | |||
2826 | 1797 | Note that the hash may still become ambiguous in the future. | ||
2827 | 1798 | """ | ||
2828 | 1799 | stdout, _ = self.git_run(['rev-parse', '--short', hash]) | ||
2829 | 1800 | return stdout.strip() | ||
2830 | 1801 | |||
2831 | 1802 | def git_run(self, args, env=None, **kwargs): | ||
2832 | 1803 | """Run the git CLI with the provided arguments | ||
2833 | 1804 | |||
2834 | 1805 | :param list(str) args: arguments to the git CLI | ||
2835 | 1806 | :param dict env: additional environment variables to use | ||
2836 | 1807 | :param dict **kwargs: further arguments to pass through to | ||
2837 | 1808 | gitubuntu.run.run() | ||
2838 | 1809 | :raises subprocess.CalledProcessError: if git exits non-zero | ||
2839 | 1810 | :rtype: (str, str) | ||
2840 | 1811 | :returns: stdout and stderr strings containing the subprocess output | ||
2841 | 1812 | |||
2842 | 1813 | The environment used is based on the Python process' environment at the | ||
2843 | 1814 | time this class instance was constructed. | ||
2844 | 1815 | |||
2845 | 1816 | The GIT_DIR and GIT_WORK_TREE environment variables are set | ||
2846 | 1817 | automatically based on the repository's existing location and settings. | ||
2847 | 1818 | |||
2848 | 1819 | If env is set, then the environment to be used is updated with env | ||
2849 | 1820 | before the call to git is made. This can override GIT_DIR, | ||
2850 | 1821 | GIT_WORK_TREE, and anything else. | ||
2851 | 1822 | """ | ||
2852 | 1823 | return git_run( | ||
2853 | 1824 | pygit2_repo=self.raw_repo, | ||
2854 | 1825 | args=args, | ||
2855 | 1826 | initial_env=self._initial_env, | ||
2856 | 1827 | update_env=env, | ||
2857 | 1828 | **kwargs, | ||
2858 | 1829 | ) | ||
2859 | 1830 | |||
2860 | 1831 | def garbage_collect(self): | ||
2861 | 1832 | self.git_run(['gc']) | ||
2862 | 1833 | |||
2863 | 1834 | def extract_file_from_treeish(self, treeish_string, filename): | ||
2864 | 1835 | """extract a file from @treeish to a local file | ||
2865 | 1836 | |||
2866 | 1837 | Arguments: | ||
2867 | 1838 | treeish - SHA1 of treeish | ||
2868 | 1839 | filename - file to extract from @treeish | ||
2869 | 1840 | |||
2870 | 1841 | Returns a NamedTemporaryFile that is flushed but not rewound. | ||
2871 | 1842 | """ | ||
2872 | 1843 | blob = follow_symlinks_to_blob( | ||
2873 | 1844 | self.raw_repo, | ||
2874 | 1845 | treeish_object=self.raw_repo.revparse_single(treeish_string), | ||
2875 | 1846 | path=filename, | ||
2876 | 1847 | ) | ||
2877 | 1848 | outfile = tempfile.NamedTemporaryFile() | ||
2878 | 1849 | outfile.write(blob.data) | ||
2879 | 1850 | outfile.flush() | ||
2880 | 1851 | return outfile | ||
2881 | 1852 | |||
2882 | 1853 | @lru_cache() | ||
2883 | 1854 | def get_changelog_from_treeish(self, treeish_string): | ||
2884 | 1855 | return Changelog.from_treeish( | ||
2885 | 1856 | self.raw_repo, | ||
2886 | 1857 | self.raw_repo.revparse_single(treeish_string), | ||
2887 | 1858 | ) | ||
2888 | 1859 | |||
2889 | 1860 | def get_changelog_versions_from_treeish(self, treeish_string): | ||
2890 | 1861 | """Extract current and prior versions from debian/changelog in a | ||
2891 | 1862 | given @treeish_string | ||
2892 | 1863 | |||
2893 | 1864 | Returns (None, None) if the treeish supplied is None or if | ||
2894 | 1865 | 'debian/changelog' does not exist in the treeish. | ||
2895 | 1866 | |||
2896 | 1867 | Returns (current, previous) on success. | ||
2897 | 1868 | """ | ||
2898 | 1869 | try: | ||
2899 | 1870 | changelog = self.get_changelog_from_treeish(treeish_string) | ||
2900 | 1871 | except KeyError: | ||
2901 | 1872 | # If 'debian/changelog' does | ||
2902 | 1873 | # not exist, then (None, None) is returned. KeyError propagates up | ||
2903 | 1874 | # from Changelog's __init__. | ||
2904 | 1875 | return None, None | ||
2905 | 1876 | try: | ||
2906 | 1877 | return changelog.version, changelog.previous_version | ||
2907 | 1878 | except CalledProcessError: | ||
2908 | 1879 | raise GitUbuntuChangelogError( | ||
2909 | 1880 | 'Cannot get changelog versions' | ||
2910 | 1881 | ) | ||
2911 | 1882 | |||
2912 | 1883 | def get_changelog_distribution_from_treeish(self, treeish_string): | ||
2913 | 1884 | """Extract targetted distribution from debian/changelog in a | ||
2914 | 1885 | given treeish | ||
2915 | 1886 | """ | ||
2916 | 1887 | |||
2917 | 1888 | if treeish_string is None: | ||
2918 | 1889 | return None | ||
2919 | 1890 | |||
2920 | 1891 | try: | ||
2921 | 1892 | return self.get_changelog_from_treeish(treeish_string).distribution | ||
2922 | 1893 | except (KeyError, CalledProcessError): | ||
2923 | 1894 | raise GitUbuntuChangelogError( | ||
2924 | 1895 | 'Cannot get changelog distribution' | ||
2925 | 1896 | ) | ||
2926 | 1897 | |||
2927 | 1898 | def get_changelog_srcpkg_from_treeish(self, treeish_string): | ||
2928 | 1899 | """Extract srcpkg from debian/changelog in a given treeish | ||
2929 | 1900 | """ | ||
2930 | 1901 | |||
2931 | 1902 | if treeish_string is None: | ||
2932 | 1903 | return None | ||
2933 | 1904 | |||
2934 | 1905 | try: | ||
2935 | 1906 | return self.get_changelog_from_treeish(treeish_string).srcpkg | ||
2936 | 1907 | except (KeyError, CalledProcessError): | ||
2937 | 1908 | raise GitUbuntuChangelogError( | ||
2938 | 1909 | 'Cannot get changelog source package name' | ||
2939 | 1910 | ) | ||
2940 | 1911 | |||
2941 | 1912 | def get_head_info(self, head_prefix, namespace): | ||
2942 | 1913 | """Extract package versions at branch heads | ||
2943 | 1914 | |||
2944 | 1915 | Extract the version from debian/changelog of all | ||
2945 | 1916 | f'{namespace}/{head_prefix>/*' branches, excluding any branch that | ||
2946 | 1917 | contains 'ubuntu/devel'. | ||
2947 | 1918 | |||
2948 | 1919 | :param str namespace: the namespace under which git refs are found | ||
2949 | 1920 | :param str head_prefix: the prefix to look for | ||
2950 | 1921 | :rtype: dict(str, HeadInfoItem) | ||
2951 | 1922 | :returns: a dictionary keyed by the namespaced branch name (ie. without | ||
2952 | 1923 | a 'refs/heads/' prefix but with the namespace prefix, eg. | ||
2953 | 1924 | 'importer/ubuntu/focal-devel'). | ||
2954 | 1925 | """ | ||
2955 | 1926 | head_info = dict() | ||
2956 | 1927 | for head in self.local_branches: | ||
2957 | 1928 | prefix = '%s/%s' % (namespace, head_prefix) | ||
2958 | 1929 | if not head.branch_name.startswith(prefix): | ||
2959 | 1930 | continue | ||
2960 | 1931 | if 'ubuntu/devel' in head.branch_name: | ||
2961 | 1932 | continue | ||
2962 | 1933 | version, _ = ( | ||
2963 | 1934 | self.get_changelog_versions_from_treeish(str(head.peel().id)) | ||
2964 | 1935 | ) | ||
2965 | 1936 | head_info[head.branch_name] = HeadInfoItem( | ||
2966 | 1937 | version=version, | ||
2967 | 1938 | commit_time=head.peel().commit_time, | ||
2968 | 1939 | commit_id=head.peel().id, | ||
2969 | 1940 | ) | ||
2970 | 1941 | |||
2971 | 1942 | return head_info | ||
2972 | 1943 | |||
2973 | 1944 | def treeishs_identical(self, treeish_string1, treeish_string2): | ||
2974 | 1945 | if treeish_string1 is None or treeish_string2 is None: | ||
2975 | 1946 | return False | ||
2976 | 1947 | _tree_obj1 = self.raw_repo.revparse_single(treeish_string1) | ||
2977 | 1948 | _tree_id1 = _tree_obj1.peel(pygit2.Tree).id | ||
2978 | 1949 | _tree_obj2 = self.raw_repo.revparse_single(treeish_string2) | ||
2979 | 1950 | _tree_id2 = _tree_obj2.peel(pygit2.Tree).id | ||
2980 | 1951 | return _tree_id1 == _tree_id2 | ||
2981 | 1952 | |||
2982 | 1953 | def get_head_by_name(self, name): | ||
2983 | 1954 | try: | ||
2984 | 1955 | return self.raw_repo.lookup_branch(name) | ||
2985 | 1956 | except TypeError: | ||
2986 | 1957 | return None | ||
2987 | 1958 | |||
2988 | 1959 | def get_tag_reference(self, tag): | ||
2989 | 1960 | """Return the tag object if it exists in the repository""" | ||
2990 | 1961 | try: | ||
2991 | 1962 | return self.raw_repo.lookup_reference('refs/tags/%s' % tag) | ||
2992 | 1963 | except (KeyError, ValueError): | ||
2993 | 1964 | return None | ||
2994 | 1965 | |||
2995 | 1966 | def get_import_tag( | ||
2996 | 1967 | self, | ||
2997 | 1968 | version, | ||
2998 | 1969 | namespace, | ||
2999 | 1970 | patch_state=PatchState.UNAPPLIED, | ||
3000 | 1971 | ): | ||
3001 | 1972 | """ | ||
3002 | 1973 | Return the import tag matching the given specification. | ||
3003 | 1974 | |||
3004 | 1975 | :param str version: the package version string to match | ||
3005 | 1976 | :param str namespace: the namespace under which git refs are found | ||
3006 | 1977 | :param PatchState patch_state: whether to look for unapplied or applied | ||
3007 | 1978 | tags | ||
3008 | 1979 | :returns: the matching import tag, or None if there is no match | ||
3009 | 1980 | :rtype: pygit2.Reference or None | ||
3010 | 1981 | """ | ||
3011 | 1982 | return self.get_tag_reference( | ||
3012 | 1983 | import_tag(version, namespace, patch_state) | ||
3013 | 1984 | ) | ||
3014 | 1985 | |||
3015 | 1986 | def get_reimport_tag( | ||
3016 | 1987 | self, | ||
3017 | 1988 | version, | ||
3018 | 1989 | namespace, | ||
3019 | 1990 | reimport, | ||
3020 | 1991 | patch_state=PatchState.UNAPPLIED, | ||
3021 | 1992 | ): | ||
3022 | 1993 | """ | ||
3023 | 1994 | Return the reimport tag matching the given specification. | ||
3024 | 1995 | |||
3025 | 1996 | :param str version: the package version string to match | ||
3026 | 1997 | :param str namespace: the namespace under which git refs are found | ||
3027 | 1998 | :param int reimport: the sequence number of the reimport tag | ||
3028 | 1999 | :param PatchState patch_state: whether to look for unapplied or applied | ||
3029 | 2000 | tags | ||
3030 | 2001 | :returns: the matching reimport tag, or None if there is no match | ||
3031 | 2002 | :rtype: pygit2.Reference or None | ||
3032 | 2003 | """ | ||
3033 | 2004 | return self.get_tag_reference( | ||
3034 | 2005 | reimport_tag(version, namespace, reimport, patch_state) | ||
3035 | 2006 | ) | ||
3036 | 2007 | |||
3037 | 2008 | def get_all_reimport_tags( | ||
3038 | 2009 | self, | ||
3039 | 2010 | version, | ||
3040 | 2011 | namespace, | ||
3041 | 2012 | patch_state=PatchState.UNAPPLIED, | ||
3042 | 2013 | ): | ||
3043 | 2014 | """ | ||
3044 | 2015 | Return all reimport tags matching the given specification. | ||
3045 | 2016 | |||
3046 | 2017 | :param str version: the package version string to match | ||
3047 | 2018 | :param str namespace: the namespace under which git refs are found | ||
3048 | 2019 | :param PatchState patch_state: whether to look for unapplied or applied | ||
3049 | 2020 | tags | ||
3050 | 2021 | :returns: matching reimport tags | ||
3051 | 2022 | :rtype: sequence(pygit2.Reference) | ||
3052 | 2023 | """ | ||
3053 | 2024 | return self.references_with_prefix( | ||
3054 | 2025 | 'refs/tags/%s/' % reimport_tag_prefix( | ||
3055 | 2026 | version, | ||
3056 | 2027 | namespace, | ||
3057 | 2028 | patch_state, | ||
3058 | 2029 | ) | ||
3059 | 2030 | ) | ||
3060 | 2031 | |||
3061 | 2032 | def get_upload_tag(self, version, namespace): | ||
3062 | 2033 | """ | ||
3063 | 2034 | Return the upload tag matching the given specification. | ||
3064 | 2035 | |||
3065 | 2036 | :param str version: the package version string to match | ||
3066 | 2037 | :param str namespace: the namespace under which git refs are found | ||
3067 | 2038 | :returns: the matching upload tag, or None if there is no match | ||
3068 | 2039 | :rtype: pygit2.Reference or None | ||
3069 | 2040 | """ | ||
3070 | 2041 | return self.get_tag_reference(upload_tag(version, namespace)) | ||
3071 | 2042 | |||
3072 | 2043 | def get_upstream_tag(self, version, namespace): | ||
3073 | 2044 | """ | ||
3074 | 2045 | Return the upstream tag matching the given specification. | ||
3075 | 2046 | |||
3076 | 2047 | :param str version: the package version string to match | ||
3077 | 2048 | :param str namespace: the namespace under which git refs are found | ||
3078 | 2049 | :returns: the matching upstream tag, or None if there is no match | ||
3079 | 2050 | :rtype: pygit2.Reference or None | ||
3080 | 2051 | """ | ||
3081 | 2052 | return self.get_tag_reference(upstream_tag(version, namespace)) | ||
3082 | 2053 | |||
3083 | 2054 | def get_orphan_tag(self, version, namespace): | ||
3084 | 2055 | """ | ||
3085 | 2056 | Return the orphan tag matching the given specification. | ||
3086 | 2057 | |||
3087 | 2058 | :param str version: the package version string to match | ||
3088 | 2059 | :param str namespace: the namespace under which git refs are found | ||
3089 | 2060 | :returns: the matching orphan tag, or None if there is no match | ||
3090 | 2061 | :rtype: pygit2.Reference or None | ||
3091 | 2062 | """ | ||
3092 | 2063 | return self.get_tag_reference(orphan_tag(version, namespace)) | ||
3093 | 2064 | |||
3094 | 2065 | def create_tag(self, | ||
3095 | 2066 | commit_hash, | ||
3096 | 2067 | tag_name, | ||
3097 | 2068 | tag_msg, | ||
3098 | 2069 | tagger=None, | ||
3099 | 2070 | ): | ||
3100 | 2071 | """Create a tag in the repository | ||
3101 | 2072 | |||
3102 | 2073 | :param str commit_hash: the commit hash the tag will point to. | ||
3103 | 2074 | :param str tag_name: the name of the tag to be created. | ||
3104 | 2075 | :param str tag_msg: the text of the tag annotation. | ||
3105 | 2076 | :param pygit2.Signature tagger: if supplied, use this signature in the | ||
3106 | 2077 | created tag's "tagger" metadata. If not supplied, an arbitrary name | ||
3107 | 2078 | and email address is used with the current time. | ||
3108 | 2079 | :returns: None | ||
3109 | 2080 | """ | ||
3110 | 2081 | if not tagger: | ||
3111 | 2082 | tagger_time, tagger_offset = datetime_to_signature_spec( | ||
3112 | 2083 | datetime.datetime.now(), | ||
3113 | 2084 | ) | ||
3114 | 2085 | tagger = pygit2.Signature( | ||
3115 | 2086 | gitubuntu.spec.SYNTHESIZED_COMMITTER_NAME, | ||
3116 | 2087 | gitubuntu.spec.SYNTHESIZED_COMMITTER_EMAIL, | ||
3117 | 2088 | tagger_time, | ||
3118 | 2089 | tagger_offset, | ||
3119 | 2090 | ) | ||
3120 | 2091 | |||
3121 | 2092 | logging.debug("Creating tag %s pointing to %s", tag_name, commit_hash) | ||
3122 | 2093 | self.raw_repo.create_tag( | ||
3123 | 2094 | tag_name, | ||
3124 | 2095 | pygit2.Oid(hex=commit_hash), | ||
3125 | 2096 | pygit2.GIT_OBJ_COMMIT, | ||
3126 | 2097 | tagger, | ||
3127 | 2098 | tag_msg, | ||
3128 | 2099 | ) | ||
3129 | 2100 | |||
3130 | 2101 | def nearest_remote_branches(self, commit_hash, prefix=None, | ||
3131 | 2102 | max_commits=100 | ||
3132 | 2103 | ): | ||
3133 | 2104 | '''Return the set of remote branches nearest to @commit_hash | ||
3134 | 2105 | |||
3135 | 2106 | This is a set of remote branch objects that are currently | ||
3136 | 2107 | pointing at a commit, where that commit is the nearest ancestor | ||
3137 | 2108 | to @commit_hash among the possible commits. | ||
3138 | 2109 | |||
3139 | 2110 | If no such commit is found, an empty set is returned. | ||
3140 | 2111 | |||
3141 | 2112 | Only consider remote branches that start with @prefix. | ||
3142 | 2113 | |||
3143 | 2114 | Stop searching beyond the @max_commits'-th ancestor. Usually this method | ||
3144 | 2115 | is only used as a heuristic that generally will never need to go too far | ||
3145 | 2116 | back in history, and this avoids searching all the way back to the root | ||
3146 | 2117 | commit, which may be a long way. | ||
3147 | 2118 | ''' | ||
3148 | 2119 | |||
3149 | 2120 | # 1) cache all prefixed branch names by commit | ||
3150 | 2121 | remote_heads_by_commit = collections.defaultdict(set) | ||
3151 | 2122 | for b in self.remote_branches: | ||
3152 | 2123 | if prefix is None or b.branch_name.startswith(prefix): | ||
3153 | 2124 | remote_heads_by_commit[b.peel().id].add(b) | ||
3154 | 2125 | |||
3155 | 2126 | # 2) walk from commit_hash backwards until a cached commit is found | ||
3156 | 2127 | commits = self.raw_repo.walk( | ||
3157 | 2128 | self.get_commitish(commit_hash).id, | ||
3158 | 2129 | pygit2.GIT_SORT_TOPOLOGICAL, | ||
3159 | 2130 | ) | ||
3160 | 2131 | for commit in itertools.islice(commits, max_commits): | ||
3161 | 2132 | if commit.id not in remote_heads_by_commit: | ||
3162 | 2133 | continue # avoid creating a bunch of empty sets | ||
3163 | 2134 | |||
3164 | 2135 | if remote_heads_by_commit[commit.id]: | ||
3165 | 2136 | return remote_heads_by_commit[commit.id] | ||
3166 | 2137 | |||
3167 | 2138 | # in the currently impossible (but permitted in this state) case | ||
3168 | 2139 | # that the dictionary returned an empty set, we loop around again | ||
3169 | 2140 | # which is what we want. | ||
3170 | 2141 | |||
3171 | 2142 | return set() | ||
3172 | 2143 | |||
3173 | 2144 | |||
3174 | 2145 | def nearest_tag( | ||
3175 | 2146 | self, | ||
3176 | 2147 | commitish_string, | ||
3177 | 2148 | prefix, | ||
3178 | 2149 | max_commits=100, | ||
3179 | 2150 | ): | ||
3180 | 2151 | # 1) cache all patterned tag names by commit | ||
3181 | 2152 | pattern_tags_by_commit = collections.defaultdict(set) | ||
3182 | 2153 | for t in self.tags: | ||
3183 | 2154 | if t.name.startswith('refs/tags/' + prefix): | ||
3184 | 2155 | pattern_tags_by_commit[t.peel(pygit2.Commit).id].add(t) | ||
3185 | 2156 | |||
3186 | 2157 | commits = self.raw_repo.walk( | ||
3187 | 2158 | self.get_commitish(commitish_string).id, | ||
3188 | 2159 | pygit2.GIT_SORT_TOPOLOGICAL, | ||
3189 | 2160 | ) | ||
3190 | 2161 | for commit in itertools.islice(commits, max_commits): | ||
3191 | 2162 | if commit.id not in pattern_tags_by_commit: | ||
3192 | 2163 | continue | ||
3193 | 2164 | |||
3194 | 2165 | return pattern_tags_by_commit[commit.id].pop() | ||
3195 | 2166 | |||
3196 | 2167 | return None | ||
3197 | 2168 | |||
3198 | 2169 | @staticmethod | ||
3199 | 2170 | def tag_to_pretty_name(tag): | ||
3200 | 2171 | _, _, pretty_name = tag.name.partition('refs/tags/') | ||
3201 | 2172 | return pretty_name | ||
3202 | 2173 | |||
3203 | 2174 | def create_tracking_branch(self, branch_name, upstream_name, force=False): | ||
3204 | 2175 | return self.raw_repo.create_branch( | ||
3205 | 2176 | branch_name, | ||
3206 | 2177 | self.raw_repo.lookup_branch( | ||
3207 | 2178 | upstream_name, | ||
3208 | 2179 | pygit2.GIT_BRANCH_REMOTE | ||
3209 | 2180 | ).peel(pygit2.Commit), | ||
3210 | 2181 | force | ||
3211 | 2182 | ) | ||
3212 | 2183 | |||
3213 | 2184 | def checkout_commitish(self, commitish): | ||
3214 | 2185 | # pygit2 checkout does not accept hashes | ||
3215 | 2186 | # https://github.com/libgit2/pygit2/issues/412 | ||
3216 | 2187 | # self.raw_repo.checkout_tree(self.get_commitish(commitish)) | ||
3217 | 2188 | self.git_run(['checkout', commitish]) | ||
3218 | 2189 | |||
3219 | 2190 | def reset_commitish(self, commitish): | ||
3220 | 2191 | # pygit2 checkout does not accept hashes | ||
3221 | 2192 | # https://github.com/libgit2/pygit2/issues/412 | ||
3222 | 2193 | # self.checkout_tree(self.get_commitish(commitish)) | ||
3223 | 2194 | self.git_run(['reset', '--hard', commitish]) | ||
3224 | 2195 | |||
3225 | 2196 | def update_head_to_commit(self, head_name, commit_hash): | ||
3226 | 2197 | try: | ||
3227 | 2198 | self.raw_repo.lookup_branch(head_name).set_target(commit_hash) | ||
3228 | 2199 | except AttributeError: | ||
3229 | 2200 | self.raw_repo.create_branch(head_name, | ||
3230 | 2201 | self.raw_repo.get(commit_hash) | ||
3231 | 2202 | ) | ||
3232 | 2203 | |||
3233 | 2204 | def clean_repository_state(self): | ||
3234 | 2205 | """Cleanup working tree""" | ||
3235 | 2206 | runq(['git', 'checkout', '--orphan', 'master'], | ||
3236 | 2207 | check=False, env=self.env) | ||
3237 | 2208 | runq(['git', 'reset', '--hard'], env=self.env) | ||
3238 | 2209 | runq(['git', 'clean', '-f', '-d'], env=self.env) | ||
3239 | 2210 | |||
3240 | 2211 | def get_all_changelog_versions_from_treeish(self, treeish): | ||
3241 | 2212 | changelog = self.get_changelog_from_treeish(treeish) | ||
3242 | 2213 | return changelog.all_versions | ||
3243 | 2214 | |||
3244 | 2215 | def annotated_tag(self, tag_name, commitish, force, msg=None): | ||
3245 | 2216 | try: | ||
3246 | 2217 | args = ['tag', '-a', tag_name, commitish] | ||
3247 | 2218 | if force: | ||
3248 | 2219 | args += ['-f'] | ||
3249 | 2220 | if msg is not None: | ||
3250 | 2221 | args += ['-m', msg] | ||
3251 | 2222 | self.git_run(args, stdin=None, stdout=None, stderr=None) | ||
3252 | 2223 | version, _ = self.get_changelog_versions_from_treeish(commitish) | ||
3253 | 2224 | logging.info('Created annotated tag %s for version %s' % (tag_name, version)) | ||
3254 | 2225 | except: | ||
3255 | 2226 | logging.error('Unable to tag %s. Does it already exist (pass -f)?' % | ||
3256 | 2227 | tag_name | ||
3257 | 2228 | ) | ||
3258 | 2229 | raise | ||
3259 | 2230 | |||
3260 | 2231 | def tag(self, tag_name, commitish, force): | ||
3261 | 2232 | try: | ||
3262 | 2233 | args = ['tag', tag_name, commitish] | ||
3263 | 2234 | if force: | ||
3264 | 2235 | args += ['-f'] | ||
3265 | 2236 | self.git_run(args) | ||
3266 | 2237 | version, _ = self.get_changelog_versions_from_treeish(commitish) | ||
3267 | 2238 | logging.info('Created tag %s for version %s' % (tag_name, version)) | ||
3268 | 2239 | except: | ||
3269 | 2240 | logging.error('Unable to tag %s. Does it already exist (pass -f)?' % | ||
3270 | 2241 | tag_name | ||
3271 | 2242 | ) | ||
3272 | 2243 | raise | ||
3273 | 2244 | |||
3274 | 2245 | def commit_source_tree( | ||
3275 | 2246 | self, | ||
3276 | 2247 | tree, | ||
3277 | 2248 | parents, | ||
3278 | 2249 | log_message, | ||
3279 | 2250 | commit_date=None, | ||
3280 | 2251 | author_date=None, | ||
3281 | 2252 | ): | ||
3282 | 2253 | """Commit a git tree with appropriate parents and message | ||
3283 | 2254 | |||
3284 | 2255 | Given a git tree that contains a source package, create a matching | ||
3285 | 2256 | commit using metadata derived from the tree as required according to | ||
3286 | 2257 | the import specification. | ||
3287 | 2258 | |||
3288 | 2259 | Commit metadata elements that are not specified as derived from the | ||
3289 | 2260 | tree itself are required as parameters. | ||
3290 | 2261 | |||
3291 | 2262 | :param pygit2.Oid tree: reference to the git tree in this repository | ||
3292 | 2263 | that contains a debian/changelog file | ||
3293 | 2264 | :param list(pygit2.Oid) parents: parent commits of the commit to be | ||
3294 | 2265 | created | ||
3295 | 2266 | :param bytes log_message: commit message | ||
3296 | 2267 | :param datetime.datetime commit_date: the commit date to use (any | ||
3297 | 2268 | sub-second part of the timestamp is truncated). If None, use the | ||
3298 | 2269 | current date. | ||
3299 | 2270 | :param datetime.datetime author_date: overrides the author date | ||
3300 | 2271 | normally parsed from the changelog entry (i.e. for handling date | ||
3301 | 2272 | parsing edge cases). Any sub-second part of the timestamp is | ||
3302 | 2273 | truncated. | ||
3303 | 2274 | :returns: reference to the created commit | ||
3304 | 2275 | :rtype: pygit2.Oid | ||
3305 | 2276 | """ | ||
3306 | 2277 | if commit_date is None: | ||
3307 | 2278 | commit_date = datetime.datetime.now() | ||
3308 | 2279 | |||
3309 | 2280 | commit_time, commit_offset = datetime_to_signature_spec(commit_date) | ||
3310 | 2281 | changelog = self.get_changelog_from_treeish(str(tree)) | ||
3311 | 2282 | |||
3312 | 2283 | return self.raw_repo.create_commit( | ||
3313 | 2284 | None, # ref: do not update any ref | ||
3314 | 2285 | pygit2.Signature(*changelog.git_authorship(author_date)), # author | ||
3315 | 2286 | pygit2.Signature( # committer | ||
3316 | 2287 | name=gitubuntu.spec.SYNTHESIZED_COMMITTER_NAME, | ||
3317 | 2288 | email=gitubuntu.spec.SYNTHESIZED_COMMITTER_EMAIL, | ||
3318 | 2289 | time=commit_time, | ||
3319 | 2290 | offset=commit_offset, | ||
3320 | 2291 | ), | ||
3321 | 2292 | log_message, # message | ||
3322 | 2293 | tree, # tree | ||
3323 | 2294 | parents, # parents | ||
3324 | 2295 | ) | ||
3325 | 2296 | |||
3326 | 2297 | |||
3327 | 2298 | @classmethod | ||
3328 | 2299 | def _create_replacement_tree_builder(cls, repo, treeish, sub_path): | ||
3329 | 2300 | '''Create a replacement TreeBuilder | ||
3330 | 2301 | |||
3331 | 2302 | Create a TreeBuilder based on an existing repository, top-level | ||
3332 | 2303 | tree-ish and path inside that tree. | ||
3333 | 2304 | |||
3334 | 2305 | A sub_path of '' is taken to mean a request for a replacement | ||
3335 | 2306 | TreeBuilder for the top level tree. | ||
3336 | 2307 | |||
3337 | 2308 | Returns a TreeBuilder object pre-populated with the previous contents. | ||
3338 | 2309 | If the path did not previously exist in the tree-ish, then return an | ||
3339 | 2310 | empty TreeBuilder instead. | ||
3340 | 2311 | ''' | ||
3341 | 2312 | |||
3342 | 2313 | tree = treeish.peel(pygit2.GIT_OBJ_TREE) | ||
3343 | 2314 | |||
3344 | 2315 | # Short path: sub_path == '' means want root | ||
3345 | 2316 | if not sub_path: | ||
3346 | 2317 | return repo.TreeBuilder(tree) | ||
3347 | 2318 | |||
3348 | 2319 | try: | ||
3349 | 2320 | tree_entry = tree[sub_path] | ||
3350 | 2321 | except KeyError: | ||
3351 | 2322 | # sub_path does not exist in tree, so return an empty TreeBuilder | ||
3352 | 2323 | tree_builder = repo.TreeBuilder() | ||
3353 | 2324 | else: | ||
3354 | 2325 | # The tree entry must itself be a tree | ||
3355 | 2326 | assert tree_entry.filemode == pygit2.GIT_FILEMODE_TREE | ||
3356 | 2327 | sub_tree = repo.get(tree_entry.id).peel(pygit2.GIT_OBJ_TREE) | ||
3357 | 2328 | tree_builder = repo.TreeBuilder(sub_tree) | ||
3358 | 2329 | |||
3359 | 2330 | return tree_builder | ||
3360 | 2331 | |||
    @classmethod
    def _add_missing_tree_dirs(cls, repo, top_path, top_tree_object, _sub_path=''):
        """
        Recursively add empty directories to a tree object

        Find empty directories under top_path and make sure that empty tree
        objects exist for them. If this means that the tree object must change,
        then a replacement tree object is created accordingly.

        :param pygit2.Repository repo: repository in which tree objects are
            created
        :param str top_path: path to the extracted contents of the tree
        :param top_tree_object: the tree object corresponding to top_path
        :param str _sub_path: (internal) relative path for where we are in the
            recursive walk, below top_path

        Returns None if oid unchanged, or oid if it changed.
        """

        # full path to our _sub_path, including top_path
        full_path = os.path.join(top_path, _sub_path)

        dir_list = os.listdir(full_path)
        if not dir_list:
            # directory is empty, so this is always the empty tree object
            return repo.TreeBuilder().write()

        # tree_builder is None if we don't need one yet, or is the replacement
        # for the tree object for this recursive call
        tree_builder = None
        for entry in dir_list:
            entry_path = os.path.join(full_path, entry)
            # We cannot use os.path.isdir() here as we don't want to recurse
            # down symlinks to directories.
            if stat.S_ISDIR(os.lstat(entry_path).st_mode):
                # this is a directory, so recurse down
                entry_oid = cls._add_missing_tree_dirs(
                    repo=repo,
                    top_path=top_path,
                    top_tree_object=top_tree_object,
                    _sub_path=os.path.join(_sub_path, entry),
                )
                if entry_oid:
                    # The recursive call reported a change to the tree, so we
                    # must adopt it in what we return to propagate the change
                    # upwards.
                    if tree_builder is None:
                        # There is no replacement in progress for this
                        # recursive call's tree object, so start one.
                        tree_builder = cls._create_replacement_tree_builder(
                            repo=repo,
                            treeish=top_tree_object,
                            sub_path=_sub_path,
                        )
                    # If the entry previously existed, remove it.
                    if tree_builder.get(entry):
                        tree_builder.remove(entry)
                    # Add the replacement tree entry
                    tree_builder.insert(  # (takes no kwargs)
                        entry,  # name
                        entry_oid,  # oid
                        pygit2.GIT_FILEMODE_TREE,  # attr
                    )

        if tree_builder is None:
            return None  # no changes
        else:
            return tree_builder.write()  # create replacement tree object
3427 | 2398 | |||
    @classmethod
    def dir_to_tree(cls, pygit2_repo, path, escape=False):
        """Create a git tree object from the given filesystem path

        :param pygit2.Repository pygit2_repo: the repository on which to
            operate. If you have a GitUbuntuRepository instance, you can use
            its raw_repo property.
        :param path: path to filesystem directory to be the root of the tree
        :param escape: if True, escape using escape_dot_git() first. This
            mutates the provided filesystem tree.
        :returns: hash string of the created tree object

        escape should be used when the directory being moved into git is
        directly from a source package, since the source package may contain
        files or directories named '.git' and these cannot otherwise be
        represented in a git tree object.

        escape should not be used if the directory has already been escaped
        previously. For example: if escape was previously used to move into a
        git tree object, and that git tree object has been extracted to a
        working directory for manipulation without unescaping, then escape
        should not be used again to move that result back into a git tree
        object.
        """
        if escape:
            escape_dot_git(path)
        # git expects the index file to not exist (in order to create a fresh
        # one), so create a temporary directory to put it in so we have a name
        # we can use safely.
        with tempfile.TemporaryDirectory() as index_dir:
            index_path = os.path.join(index_dir, 'index')
            # Note: this closure calls the module-level git_run() (which takes
            # pygit2_repo/work_tree_path/index_path), not a method on cls.
            def indexed_git_run(*args):
                return git_run(
                    pygit2_repo=pygit2_repo,
                    args=args,
                    work_tree_path=path,
                    index_path=index_path,
                )
            indexed_git_run('add', '-f', '-A')
            # Unstage .git and .pc so git metadata and quilt state do not end
            # up in the tree object.
            indexed_git_run('reset', 'HEAD', '--', '.git')
            indexed_git_run('reset', 'HEAD', '--', '.pc')
            tree_hash_str, _ = indexed_git_run('write-tree')
            tree_hash_str = tree_hash_str.strip()
            tree = pygit2_repo.get(tree_hash_str)

            # Add any empty directories that git did not import. Workaround
            # for LP: #1687057.
            replacement_oid = cls._add_missing_tree_dirs(
                repo=pygit2_repo,
                top_path=path,
                top_tree_object=tree,
            )
            if replacement_oid:
                # Empty directories had to be added
                return str(replacement_oid)  # return the replacement instead
            else:
                # No empty directories were added
                return tree_hash_str  # no replacement was needed
3485 | 2456 | |||
3486 | 2457 | @contextmanager | ||
3487 | 2458 | def temporary_worktree(self, commitish, prefix=None): | ||
3488 | 2459 | with tempfile.TemporaryDirectory(prefix=prefix) as tempdir: | ||
3489 | 2460 | self.git_run( | ||
3490 | 2461 | [ | ||
3491 | 2462 | 'worktree', | ||
3492 | 2463 | 'add', | ||
3493 | 2464 | '--detach', | ||
3494 | 2465 | '--force', | ||
3495 | 2466 | tempdir, | ||
3496 | 2467 | commitish, | ||
3497 | 2468 | ] | ||
3498 | 2469 | ) | ||
3499 | 2470 | |||
3500 | 2471 | oldcwd = os.getcwd() | ||
3501 | 2472 | os.chdir(tempdir) | ||
3502 | 2473 | |||
3503 | 2474 | try: | ||
3504 | 2475 | yield | ||
3505 | 2476 | except: | ||
3506 | 2477 | raise | ||
3507 | 2478 | finally: | ||
3508 | 2479 | os.chdir(oldcwd) | ||
3509 | 2480 | |||
3510 | 2481 | self.git_run(['worktree', 'prune']) | ||
3511 | 2482 | |||
3512 | 2483 | def tree_hash_after_command(self, commitish, cmd): | ||
3513 | 2484 | with self.temporary_worktree(commitish): | ||
3514 | 2485 | try: | ||
3515 | 2486 | run(cmd) | ||
3516 | 2487 | except CalledProcessError as e: | ||
3517 | 2488 | logging.error("Unable to execute `%s`", ' '.join(cmd)) | ||
3518 | 2489 | raise | ||
3519 | 2490 | |||
3520 | 2491 | run(["git", "add", "-f", ".",]) | ||
3521 | 2492 | tree_hash, _ = run(["git", "write-tree"]) | ||
3522 | 2493 | return tree_hash.strip() | ||
3523 | 2494 | |||
3524 | 2495 | def tree_hash_subpath(self, treeish_string, path): | ||
3525 | 2496 | """Get the tree hash for path at a given treeish | ||
3526 | 2497 | |||
3527 | 2498 | Arguments: | ||
3528 | 2499 | @treeish_string: a string Git treeish | ||
3529 | 2500 | @path: a string path present in @treeish_string | ||
3530 | 2501 | |||
3531 | 2502 | Returns: | ||
3532 | 2503 | String hash of Git tree corresponding to @path in @treeish_string | ||
3533 | 2504 | """ | ||
3534 | 2505 | tree_obj = self.raw_repo.revparse_single(treeish_string).peel( | ||
3535 | 2506 | pygit2.Tree | ||
3536 | 2507 | ) | ||
3537 | 2508 | return str(tree_obj[path].id) | ||
3538 | 2509 | |||
3539 | 2510 | def paths_are_identical(self, treeish1_string, treeish2_string, path): | ||
3540 | 2511 | """Determine if a given path is the same in two treeishs | ||
3541 | 2512 | |||
3542 | 2513 | Arguments: | ||
3543 | 2514 | @treeish1_string: a string Git treeish | ||
3544 | 2515 | @treeish2_string: a string Git treeish | ||
3545 | 2516 | @path: a string path present in @treeish1_string and @treeish2_string | ||
3546 | 2517 | |||
3547 | 2518 | Returns: | ||
3548 | 2519 | True, if @path is the same in @treeish1_string and @treeish2_string | ||
3549 | 2520 | False, otherwise | ||
3550 | 2521 | """ | ||
3551 | 2522 | try: | ||
3552 | 2523 | subpath_tree_hash1 = self.tree_hash_subpath( | ||
3553 | 2524 | treeish1_string, | ||
3554 | 2525 | path, | ||
3555 | 2526 | ) | ||
3556 | 2527 | except KeyError: | ||
3557 | 2528 | # if the path does not exist in treeish | ||
3558 | 2529 | subpath_tree_hash1 = None | ||
3559 | 2530 | try: | ||
3560 | 2531 | subpath_tree_hash2 = self.tree_hash_subpath( | ||
3561 | 2532 | treeish2_string, | ||
3562 | 2533 | path, | ||
3563 | 2534 | ) | ||
3564 | 2535 | except KeyError: | ||
3565 | 2536 | subpath_tree_hash2 = None | ||
3566 | 2537 | |||
3567 | 2538 | return subpath_tree_hash1 == subpath_tree_hash2 | ||
3568 | 2539 | |||
3569 | 2540 | @lru_cache() | ||
3570 | 2541 | def quilt_env(self, treeish): | ||
3571 | 2542 | """Return a suitable environment for running quilt. | ||
3572 | 2543 | |||
3573 | 2544 | This varies depending on the supplied commit since both | ||
3574 | 2545 | debian/patches/series and debian/patches/debian.series may be valid. | ||
3575 | 2546 | See dpkg-source(1) for details. | ||
3576 | 2547 | |||
3577 | 2548 | The returned environment includes all necessary variables by | ||
3578 | 2549 | combining self.env with the needed quilt-specific environment. | ||
3579 | 2550 | |||
3580 | 2551 | :param pygit.Object treeish: object that peels to the pygit2.Tree on | ||
3581 | 2552 | which quilt will operate. | ||
3582 | 2553 | :rtype: dict | ||
3583 | 2554 | :returns: an environment suitable for running quilt. | ||
3584 | 2555 | """ | ||
3585 | 2556 | env = self.env.copy() | ||
3586 | 2557 | env.update(quilt_env(self.raw_repo, treeish)) | ||
3587 | 2558 | return env | ||
3588 | 2559 | |||
3589 | 2560 | def quilt_env_from_treeish_str(self, treeish_str): | ||
3590 | 2561 | """Return a suitable environment for running quilt. | ||
3591 | 2562 | |||
3592 | 2563 | This is a thin wrapper around quilt_env() that works with a treeish hex | ||
3593 | 2564 | string instead of directly with a treeish object. | ||
3594 | 2565 | |||
3595 | 2566 | :param str treeish_str: the hash of the tree on which quilt will | ||
3596 | 2567 | operate, in hex. | ||
3597 | 2568 | :rtype: dict | ||
3598 | 2569 | :returns: an environment suitable for running quilt. | ||
3599 | 2570 | """ | ||
3600 | 2571 | return self.quilt_env(self.raw_repo.get(treeish_str)) | ||
3601 | 2572 | |||
    def is_patches_applied(self, commit_hash, regenerated_pc_path):
        """Determine whether @commit_hash is in a patches-applied state.

        Test-runs `quilt push -a` in a temporary worktree of the commit:
        rc=0 means patches applied cleanly, so the tree was unapplied;
        rc=2 means nothing to do, so the tree is already applied (or has
        no patches); rc=1 may just mean the .pc directory needs to be
        regenerated first, in which case the copy at @regenerated_pc_path
        is installed and the push retried once.

        :param str commit_hash: the commit to examine
        :param str regenerated_pc_path: path to a regenerated .pc directory
            to fall back on
        :rtype: bool
        :returns: True if patches-applied (or no patches), False if unapplied
        :raises CalledProcessError: on a genuine quilt failure
        """
        # first see if quilt push -a would do anything to
        # differentiate between applied and unapplied
        with self.temporary_worktree(commit_hash):
            try:
                run_quilt(
                    ['push', '-a'],
                    env=self.quilt_env_from_treeish_str(commit_hash),
                )
                # False if in an unapplied state, which is signified by
                # successful push (rc=0)
                return False
            except CalledProcessError as e:
                # non-zero return might be an error or it might mean no
                # patches exist
                if e.returncode == 1:
                    # an error may occur if we need to recreate the .pc
                    # first
                    try:
                        # the first quilt push may have created a .pc/
                        shutil.rmtree('.pc')
                        shutil.copytree(
                            regenerated_pc_path,
                            '.pc',
                        )
                    except FileNotFoundError:
                        # if there was no .pc directory, then the first
                        # quilt push failure was a real error
                        raise e

                    try:
                        run_quilt(
                            ['push', '-a'],
                            env=self.quilt_env_from_treeish_str(commit_hash),
                        )
                        # False if in an unapplied state
                        return False
                    except CalledProcessError as e:
                        # True if in a patches-applied state or
                        # there are no patches to apply
                        if e.returncode == 2:
                            return True
                        else:
                            raise
                # True if in a patches-applied state or there are
                # no patches to apply
                elif e.returncode == 2:
                    return True
                else:
                    raise
3652 | 2623 | |||
3653 | 2624 | def _maybe_quiltify_tree_hash(self, commit_hash): | ||
3654 | 2625 | """Determine if quiltify is needed and yield the quiltify'd tree hash | ||
3655 | 2626 | |||
3656 | 2627 | The imported patches-applied trees do not contain .pc | ||
3657 | 2628 | directories. To determine if an additional quilt patch is | ||
3658 | 2629 | necessary, we have to first regenerate the .pc directory, then | ||
3659 | 2630 | see if dpkg-source --commit generates a new quilt patch. | ||
3660 | 2631 | |||
3661 | 2632 | In order for dpkg-source --commit to function, we need to know | ||
3662 | 2633 | if the commit we are building is patches-unapplied or | ||
3663 | 2634 | patches-applied. In the latter case, we can build the commit | ||
3664 | 2635 | directly after copying the regenerated .pc directory. In the | ||
3665 | 2636 | former case, we do not want to copy the regenerated .pc | ||
3666 | 2637 | directory, as dpkg-source will do this for us, as it applies the | ||
3667 | 2638 | current patches. We determine if patches are applied or | ||
3668 | 2639 | unapplied by relying on `quilt push -a`'s exit status at | ||
3669 | 2640 | @commit_hash. | ||
3670 | 2641 | |||
3671 | 2642 | This is a common method used by multiple callers. | ||
3672 | 2643 | |||
3673 | 2644 | Arguments: | ||
3674 | 2645 | @commit_hash: a string Git commit hash | ||
3675 | 2646 | |||
3676 | 2647 | Returns: | ||
3677 | 2648 | String tree hash of quiltify'ing @commit_hash. | ||
3678 | 2649 | If no quiltify is needed, the return value is @commit_hash's | ||
3679 | 2650 | tree hash | ||
3680 | 2651 | """ | ||
3681 | 2652 | commit_tree_hash = str( | ||
3682 | 2653 | self.raw_repo.get(commit_hash).peel(pygit2.Tree).id | ||
3683 | 2654 | ) | ||
3684 | 2655 | if not is_3_0_quilt(self, commit_hash): | ||
3685 | 2656 | return commit_tree_hash | ||
3686 | 2657 | # the orig tarballs need to be in the parent directory of where | ||
3687 | 2658 | # quilt and dpkg-source are run | ||
3688 | 2659 | # but suppress any logging | ||
3689 | 2660 | logger = logging.getLogger() | ||
3690 | 2661 | oldLevel = logger.getEffectiveLevel() | ||
3691 | 2662 | logger.setLevel(logging.WARNING) | ||
3692 | 2663 | tarballs = gitubuntu.build.fetch_orig( | ||
3693 | 2664 | orig_search_list=gitubuntu.build.derive_orig_search_list_from_args( | ||
3694 | 2665 | self, | ||
3695 | 2666 | commitish=commit_hash, | ||
3696 | 2667 | for_merge=False, | ||
3697 | 2668 | no_pristine_tar=False, | ||
3698 | 2669 | ), | ||
3699 | 2670 | changelog=Changelog.from_treeish( | ||
3700 | 2671 | self.raw_repo, | ||
3701 | 2672 | self.raw_repo.get(commit_hash) | ||
3702 | 2673 | ), | ||
3703 | 2674 | ) | ||
3704 | 2675 | logger.setLevel(oldLevel) | ||
3705 | 2676 | # run dpkg-source | ||
3706 | 2677 | with tempfile.TemporaryDirectory() as tempdir: | ||
3707 | 2678 | # copy the generated tarballs | ||
3708 | 2679 | new_tarballs = [] | ||
3709 | 2680 | for tarball in tarballs: | ||
3710 | 2681 | new_tarballs.append(shutil.copy(tarball, tempdir)) | ||
3711 | 2682 | tarballs = new_tarballs | ||
3712 | 2683 | |||
3713 | 2684 | # create a nested temporary directory where we will recreate | ||
3714 | 2685 | # the .pc directory | ||
3715 | 2686 | with tempfile.TemporaryDirectory(prefix=tempdir+'/') as ttempdir: | ||
3716 | 2687 | oldcwd = os.getcwd() | ||
3717 | 2688 | os.chdir(ttempdir) | ||
3718 | 2689 | |||
3719 | 2690 | for tarball in tarballs: | ||
3720 | 2691 | run(['tar', '-x', '--strip-components=1', '-f', tarball,]) | ||
3721 | 2692 | |||
3722 | 2693 | # need the debian/patches | ||
3723 | 2694 | shutil.copytree( | ||
3724 | 2695 | os.path.join(self.local_dir, 'debian',), | ||
3725 | 2696 | 'debian', | ||
3726 | 2697 | ) | ||
3727 | 2698 | |||
3728 | 2699 | # generate the equivalent .pc directory | ||
3729 | 2700 | run_quilt( | ||
3730 | 2701 | ['push', '-a'], | ||
3731 | 2702 | env=self.quilt_env_from_treeish_str(commit_hash), | ||
3732 | 2703 | rcs=[2], | ||
3733 | 2704 | ) | ||
3734 | 2705 | |||
3735 | 2706 | regenerated_pc_path = os.path.join(tempdir, '.pc') | ||
3736 | 2707 | |||
3737 | 2708 | if os.path.exists(".pc"): | ||
3738 | 2709 | shutil.copytree( | ||
3739 | 2710 | '.pc', | ||
3740 | 2711 | regenerated_pc_path, | ||
3741 | 2712 | ) | ||
3742 | 2713 | |||
3743 | 2714 | os.chdir(oldcwd) | ||
3744 | 2715 | |||
3745 | 2716 | patches_applied = self.is_patches_applied( | ||
3746 | 2717 | commit_hash, | ||
3747 | 2718 | regenerated_pc_path, | ||
3748 | 2719 | ) | ||
3749 | 2720 | |||
3750 | 2721 | with self.temporary_worktree(commit_hash, prefix=tempdir+'/'): | ||
3751 | 2722 | # we only need to copy the generated .pc directory | ||
3752 | 2723 | # if we are building a patches-applied tree, which | ||
3753 | 2724 | # we determine by comparing our current tree hash to | ||
3754 | 2725 | # the generated tree hash. | ||
3755 | 2726 | if patches_applied: | ||
3756 | 2727 | try: | ||
3757 | 2728 | shutil.copytree( | ||
3758 | 2729 | regenerated_pc_path, | ||
3759 | 2730 | '.pc', | ||
3760 | 2731 | ) | ||
3761 | 2732 | except FileNotFoundError: | ||
3762 | 2733 | # it is possible no quilt patches exist yet | ||
3763 | 2734 | pass | ||
3764 | 2735 | |||
3765 | 2736 | fixup_patch_path = os.path.join( | ||
3766 | 2737 | 'debian', | ||
3767 | 2738 | 'patches', | ||
3768 | 2739 | 'git-ubuntu-fixup.patch' | ||
3769 | 2740 | ) | ||
3770 | 2741 | |||
3771 | 2742 | if os.path.exists(fixup_patch_path): | ||
3772 | 2743 | raise ValueError( | ||
3773 | 2744 | "A quilt patch with the name git-ubuntu-fixup.patch " | ||
3774 | 2745 | "already exists in %s" % commit_hash | ||
3775 | 2746 | ) | ||
3776 | 2747 | |||
3777 | 2748 | run( | ||
3778 | 2749 | [ | ||
3779 | 2750 | 'dpkg-source', | ||
3780 | 2751 | '--commit', | ||
3781 | 2752 | '.', | ||
3782 | 2753 | 'git-ubuntu-fixup.patch', | ||
3783 | 2754 | ], | ||
3784 | 2755 | env=self.quilt_env_from_treeish_str(commit_hash), | ||
3785 | 2756 | ) | ||
3786 | 2757 | |||
3787 | 2758 | # do not want the .pc directory in the resulting | ||
3788 | 2759 | # treeish | ||
3789 | 2760 | if os.path.exists('.pc'): | ||
3790 | 2761 | shutil.rmtree('.pc') | ||
3791 | 2762 | |||
3792 | 2763 | if os.path.exists(fixup_patch_path): | ||
3793 | 2764 | # dpkg-source uses debian/changelog to generate some | ||
3794 | 2765 | # fields. We do not know yet if the changelog has | ||
3795 | 2766 | # been updated, so elide that section of comments. | ||
3796 | 2767 | with open(fixup_patch_path, 'r+') as f: | ||
3797 | 2768 | for line in f: | ||
3798 | 2769 | if '---' in line: | ||
3799 | 2770 | break | ||
3800 | 2771 | text = """Description: git-ubuntu generated quilt fixup patch | ||
3801 | 2772 | TODO: Put a short summary on the line above and replace this paragraph | ||
3802 | 2773 | with a longer explanation of this change. Complete the meta-information | ||
3803 | 2774 | with other relevant fields (see below for details). | ||
3804 | 2775 | ---\n""" | ||
3805 | 2776 | for line in f: | ||
3806 | 2777 | text += line | ||
3807 | 2778 | f.seek(0) | ||
3808 | 2779 | f.write(text) | ||
3809 | 2780 | f.truncate() | ||
3810 | 2781 | |||
3811 | 2782 | # If we are on a patches-unapplied tree, then we | ||
3812 | 2783 | # need to reset ourselves back to @commit_hash with | ||
3813 | 2784 | # our new patch. | ||
3814 | 2785 | # In order for this to be buildable, we have to | ||
3815 | 2786 | # reverse-apply our patch, to undo the git-committed | ||
3816 | 2787 | # upstream changes. | ||
3817 | 2788 | if not patches_applied: | ||
3818 | 2789 | run(['git', 'add', '-f', 'debian/patches',]) | ||
3819 | 2790 | # if any patches add files that are untracked, | ||
3820 | 2791 | # remove them | ||
3821 | 2792 | run(['git', 'clean', '-f', '-d',]) | ||
3822 | 2793 | # reset all the other files to their status in | ||
3823 | 2794 | # HEAD | ||
3824 | 2795 | run(['git', 'checkout', commit_hash, '--', '*',]) | ||
3825 | 2796 | with open(fixup_patch_path, 'rb') as f: | ||
3826 | 2797 | run(['patch', '-Rp1',], input=f.read()) | ||
3827 | 2798 | |||
3828 | 2799 | return self.dir_to_tree(self.raw_repo, '.') | ||
3829 | 2800 | else: | ||
3830 | 2801 | return commit_tree_hash | ||
3831 | 2802 | |||
3832 | 2803 | def maybe_quiltify_tree_hash(self, commitish_string): | ||
3833 | 2804 | """Determine if quiltify is needed and return the quiltify'd tree hash | ||
3834 | 2805 | |||
3835 | 2806 | See _maybe_quiltify_tree_hash for details. | ||
3836 | 2807 | |||
3837 | 2808 | Arguments: | ||
3838 | 2809 | @commitish_string: a string Git commitish | ||
3839 | 2810 | |||
3840 | 2811 | Returns: | ||
3841 | 2812 | String tree hash of quiltify'ing @commitish_string. | ||
3842 | 2813 | If no quiltify is needed, the return value is the tree hash of | ||
3843 | 2814 | @commitish_string. | ||
3844 | 2815 | """ | ||
3845 | 2816 | commit_hash = str( | ||
3846 | 2817 | self.get_commitish(commitish_string).peel(pygit2.Commit).id | ||
3847 | 2818 | ) | ||
3848 | 2819 | return self._maybe_quiltify_tree_hash(commit_hash) | ||
3849 | 2820 | |||
3850 | 2821 | def maybe_changelogify_tree_hash(self, commit_hash): | ||
3851 | 2822 | """Determine if changelogify is needed and yield the changelogify'd tree hash | ||
3852 | 2823 | |||
3853 | 2824 | Given a commit, we need to detect if the user has inserted a | ||
3854 | 2825 | changelog entry relative to a published version for the purpose | ||
3855 | 2826 | of test builds. | ||
3856 | 2827 | |||
3857 | 2828 | Arguments: | ||
3858 | 2829 | @commit_hash: a string Git commit hash | ||
3859 | 2830 | |||
3860 | 2831 | Returns: | ||
3861 | 2832 | String tree hash of changelogify'ing @commit_hash. | ||
3862 | 2833 | If no changelogify is needed, the return value is the tree hash of | ||
3863 | 2834 | @commit_hash. | ||
3864 | 2835 | """ | ||
3865 | 2836 | commit_tree_hash = str( | ||
3866 | 2837 | self.raw_repo.get(commit_hash).peel(pygit2.Tree).id | ||
3867 | 2838 | ) | ||
3868 | 2839 | |||
3869 | 2840 | # one of these are the "base" pkg that @commit_hash's changes | ||
3870 | 2841 | # are based on | ||
3871 | 2842 | remote_tag = self.nearest_tag( | ||
3872 | 2843 | commit_hash, | ||
3873 | 2844 | prefix='pkg/', | ||
3874 | 2845 | ) | ||
3875 | 2846 | remote_branch = derive_target_branch( | ||
3876 | 2847 | self, | ||
3877 | 2848 | commit_hash, | ||
3878 | 2849 | ) | ||
3879 | 2850 | |||
3880 | 2851 | assert remote_tag or remote_branch | ||
3881 | 2852 | |||
3882 | 2853 | if remote_tag: | ||
3883 | 2854 | if remote_branch: | ||
3884 | 2855 | try: | ||
3885 | 2856 | self.git_run( | ||
3886 | 2857 | [ | ||
3887 | 2858 | 'merge-base', | ||
3888 | 2859 | '--is-ancestor', | ||
3889 | 2860 | remote_tag.name, | ||
3890 | 2861 | remote_branch, | ||
3891 | 2862 | ], | ||
3892 | 2863 | verbose_on_failure=False, | ||
3893 | 2864 | ) | ||
3894 | 2865 | parent_ref = remote_branch | ||
3895 | 2866 | except CalledProcessError as e: | ||
3896 | 2867 | if e.returncode == 1: | ||
3897 | 2868 | parent_ref = remote_tag.name | ||
3898 | 2869 | else: | ||
3899 | 2870 | raise | ||
3900 | 2871 | else: | ||
3901 | 2872 | parent_ref = remote_tag.name | ||
3902 | 2873 | else: | ||
3903 | 2874 | parent_ref = remote_branch | ||
3904 | 2875 | |||
3905 | 2876 | # If there are any changes relative to parent_ref but there are | ||
3906 | 2877 | # not any changelog changes, insert a snapshot changelog entry, | ||
3907 | 2878 | # starting from parent_ref, and return the resulting tree hash. | ||
3908 | 2879 | if str(self.raw_repo.revparse_single(parent_ref).peel( | ||
3909 | 2880 | pygit2.Tree | ||
3910 | 2881 | ).id) != commit_tree_hash and self.paths_are_identical( | ||
3911 | 2882 | parent_ref, | ||
3912 | 2883 | commit_hash, | ||
3913 | 2884 | 'debian/changelog', | ||
3914 | 2885 | ): | ||
3915 | 2886 | with self.temporary_worktree(commit_hash): | ||
3916 | 2887 | run_gbp( | ||
3917 | 2888 | [ | ||
3918 | 2889 | 'dch', | ||
3919 | 2890 | '--snapshot', | ||
3920 | 2891 | '--ignore-branch', | ||
3921 | 2892 | '--since=%s' % str(parent_ref), | ||
3922 | 2893 | ], | ||
3923 | 2894 | env=self.env, | ||
3924 | 2895 | ) | ||
3925 | 2896 | return self.dir_to_tree(self.raw_repo, '.') | ||
3926 | 2897 | |||
3927 | 2898 | # otherwise, return @commit_hash's tree hash | ||
3928 | 2899 | return commit_tree_hash | ||
3929 | 2900 | |||
3930 | 2901 | def quiltify_and_changelogify_tree_hash(self, commitish_string): | ||
3931 | 2902 | """Given a commitish, possibly quiltify and changelogify its tree | ||
3932 | 2903 | |||
3933 | 2904 | Definitions: | ||
3934 | 2905 | quiltify: generate a quilt patch from untracked upstream | ||
3935 | 2906 | changes | ||
3936 | 2907 | changelogify: generate a snapshot changelog entry if any | ||
3937 | 2908 | changes exist, and no new changelog entry yet exists | ||
3938 | 2909 | |||
3939 | 2910 | Arguments: | ||
3940 | 2911 | @commitish_string: string Git commitish | ||
3941 | 2912 | |||
3942 | 2913 | Returns: | ||
3943 | 2914 | string Git tree hash of quiltify-ing and changelogify-ing | ||
3944 | 2915 | @commitish_string, if needed | ||
3945 | 2916 | if neither quiltify or changelogify are needed, return | ||
3946 | 2917 | @commitish_string's tree hash | ||
3947 | 2918 | """ | ||
3948 | 2919 | commit_hash = str( | ||
3949 | 2920 | self.get_commitish(commitish_string).peel(pygit2.Commit).id | ||
3950 | 2921 | ) | ||
3951 | 2922 | quiltify_tree_hash = self._maybe_quiltify_tree_hash(commit_hash) | ||
3952 | 2923 | changelogify_tree_hash = self.maybe_changelogify_tree_hash(commit_hash) | ||
3953 | 2924 | |||
3954 | 2925 | quiltify_tree_obj = self.raw_repo.get(quiltify_tree_hash) | ||
3955 | 2926 | changelogify_tree_obj = self.raw_repo.get(changelogify_tree_hash) | ||
3956 | 2927 | |||
3957 | 2928 | # There are multiple ways to solve this problem, but the | ||
3958 | 2929 | # simplest is to use a TreeBuilder to merge the quiltify tree | ||
3959 | 2930 | # with the changelog from the changelogify tree | ||
3960 | 2931 | # top-level TreeBuilder | ||
3961 | 2932 | tb = self.raw_repo.TreeBuilder(quiltify_tree_obj) | ||
3962 | 2933 | te = tb.get('debian') | ||
3963 | 2934 | # TreeBuilder for debian/ | ||
3964 | 2935 | dtb = self.raw_repo.TreeBuilder(self.raw_repo.get(te.id)) | ||
3965 | 2936 | dtb.insert( # does not take kwargs | ||
3966 | 2937 | 'changelog', # name | ||
3967 | 2938 | changelogify_tree_obj['debian/changelog'].oid, # oid | ||
3968 | 2939 | pygit2.GIT_FILEMODE_BLOB, # attr | ||
3969 | 2940 | ) | ||
3970 | 2941 | # insert can replace | ||
3971 | 2942 | tb.insert( # does not take kwargs | ||
3972 | 2943 | 'debian', # name | ||
3973 | 2944 | dtb.write(), # oid | ||
3974 | 2945 | pygit2.GIT_FILEMODE_TREE, # attr | ||
3975 | 2946 | ) | ||
3976 | 2947 | return str(tb.write()) | ||
3977 | 2948 | |||
3978 | 2949 | def find_ubuntu_merge_base( | ||
3979 | 2950 | self, | ||
3980 | 2951 | ubuntu_commitish, | ||
3981 | 2952 | ): | ||
3982 | 2953 | """Find the Ubuntu merge point for a given Ubuntu version | ||
3983 | 2954 | |||
3984 | 2955 | :param ubuntu_commitish str A commitish describing the latest | ||
3985 | 2956 | Ubuntu commit | ||
3986 | 2957 | |||
3987 | 2958 | :rtype str | ||
3988 | 2959 | :returns Commit hash of import of Debian version | ||
3989 | 2960 | @ubuntu_commitish is based on. The imported Debian version | ||
3990 | 2961 | must be an ancestor of @ubuntu_commitish. If no suitable | ||
3991 | 2962 | commit is found, an empty string is returned. | ||
3992 | 2963 | """ | ||
3993 | 2964 | merge_base_tag = None | ||
3994 | 2965 | |||
3995 | 2966 | # obtain the nearest imported Debian version per the changelog | ||
3996 | 2967 | for version in self.get_all_changelog_versions_from_treeish( | ||
3997 | 2968 | ubuntu_commitish, | ||
3998 | 2969 | ): | ||
3999 | 2970 | # extract corresponding Debian version | ||
4000 | 2971 | debian_parts, _ = gitubuntu.versioning.split_version_string( | ||
4001 | 2972 | version | ||
4002 | 2973 | ) | ||
4003 | 2974 | expected_debian_version = "".join(debian_parts) | ||
4004 | 2975 | |||
4005 | 2976 | # We do not currently handle the case of a Debian version | ||
4006 | 2977 | # being reimported. I think the proper way to support that | ||
4007 | 2978 | # would be to add a parameter to `git ubuntu merge` for the | ||
4008 | 2979 | # user to tell us which reimport tag is the one the Ubuntu | ||
4009 | 2980 | # delta is based on. | ||
4010 | 2981 | merge_base_tag = self.get_import_tag( | ||
4011 | 2982 | expected_debian_version, | ||
4012 | 2983 | 'pkg', | ||
4013 | 2984 | ) | ||
4014 | 2985 | |||
4015 | 2986 | if merge_base_tag: | ||
4016 | 2987 | assert not self.get_all_reimport_tags( | ||
4017 | 2988 | expected_debian_version, | ||
4018 | 2989 | 'pkg', | ||
4019 | 2990 | ) | ||
4020 | 2991 | break | ||
4021 | 2992 | |||
4022 | 2993 | if not merge_base_tag: | ||
4023 | 2994 | logging.error( | ||
4024 | 2995 | "Unable to find an import tag for any Debian version " | ||
4025 | 2996 | "in %s:debian/changelog.", | ||
4026 | 2997 | ubuntu_commitish, | ||
4027 | 2998 | ) | ||
4028 | 2999 | return '' | ||
4029 | 3000 | |||
4030 | 3001 | merge_base_commit_hash = str(merge_base_tag.peel(pygit2.Commit).id) | ||
4031 | 3002 | |||
4032 | 3003 | try: | ||
4033 | 3004 | self.git_run( | ||
4034 | 3005 | [ | ||
4035 | 3006 | 'merge-base', | ||
4036 | 3007 | '--is-ancestor', | ||
4037 | 3008 | merge_base_commit_hash, | ||
4038 | 3009 | ubuntu_commitish, | ||
4039 | 3010 | ], | ||
4040 | 3011 | verbose_on_failure=False, | ||
4041 | 3012 | ) | ||
4042 | 3013 | except CalledProcessError as e: | ||
4043 | 3014 | if e.returncode != 1: | ||
4044 | 3015 | raise | ||
4045 | 3016 | logging.error( | ||
4046 | 3017 | "Found an import tag for %s (commit: %s), but it is " | ||
4047 | 3018 | "not an ancestor of %s.", | ||
4048 | 3019 | expected_debian_version, | ||
4049 | 3020 | merge_base_commit_hash, | ||
4050 | 3021 | ubuntu_commitish, | ||
4051 | 3022 | ) | ||
4052 | 3023 | return '' | ||
4053 | 3024 | |||
4054 | 3025 | return merge_base_commit_hash | ||
4055 | 3026 | >>>>>>> gitubuntu/git_repository.py | ||
4056 | diff --git a/gitubuntu/git_repository_test.py b/gitubuntu/git_repository_test.py | |||
4057 | 0 | new file mode 100644 | 3027 | new file mode 100644 |
4058 | index 0000000..72055f9 | |||
4059 | --- /dev/null | |||
4060 | +++ b/gitubuntu/git_repository_test.py | |||
4061 | @@ -0,0 +1,1191 @@ | |||
4062 | 1 | <<<<<<< gitubuntu/git_repository_test.py | ||
4063 | 2 | ======= | ||
4064 | 3 | import copy | ||
4065 | 4 | import datetime | ||
4066 | 5 | import itertools | ||
4067 | 6 | import os | ||
4068 | 7 | import pkg_resources | ||
4069 | 8 | import shutil | ||
4070 | 9 | import tempfile | ||
4071 | 10 | import unittest | ||
4072 | 11 | import unittest.mock | ||
4073 | 12 | |||
4074 | 13 | import pygit2 | ||
4075 | 14 | import pytest | ||
4076 | 15 | |||
4077 | 16 | import gitubuntu.git_repository as target | ||
4078 | 17 | from gitubuntu.git_repository import HeadInfoItem | ||
4079 | 18 | from gitubuntu.repo_builder import ( | ||
4080 | 19 | Blob, | ||
4081 | 20 | Commit, | ||
4082 | 21 | Placeholder, | ||
4083 | 22 | Repo, | ||
4084 | 23 | SourceTree, | ||
4085 | 24 | Symlink, | ||
4086 | 25 | Tree, | ||
4087 | 26 | ) | ||
4088 | 27 | from gitubuntu.source_builder import Source, SourceSpec | ||
4089 | 28 | import gitubuntu.spec | ||
4090 | 29 | from gitubuntu.test_fixtures import ( | ||
4091 | 30 | repo, | ||
4092 | 31 | pygit2_repo, | ||
4093 | 32 | ) | ||
4094 | 33 | from gitubuntu.test_util import get_test_changelog | ||
4095 | 34 | |||
4096 | 35 | |||
4097 | 36 | @pytest.mark.parametrize('same_remote_branch_names, different_remote_branch_names, expected', [ | ||
4098 | 37 | ([], [], ''), | ||
4099 | 38 | (['pkg/ubuntu/xenial-devel',], [], 'pkg/ubuntu/xenial-devel'), | ||
4100 | 39 | (['pkg/ubuntu/xenial-security',], [], 'pkg/ubuntu/xenial-security'), | ||
4101 | 40 | (['pkg/ubuntu/xenial-updates', 'pkg/ubuntu/xenial-devel'], [], | ||
4102 | 41 | 'pkg/ubuntu/xenial-devel' | ||
4103 | 42 | ), | ||
4104 | 43 | ([], ['pkg/ubuntu/xenial-updates', 'pkg/ubuntu/xenial-devel'], | ||
4105 | 44 | '' | ||
4106 | 45 | ), | ||
4107 | 46 | (['pkg/ubuntu/zesty-devel', 'pkg/ubuntu/zesty-proposed', 'pkg/ubuntu/devel'], [], 'pkg/ubuntu/devel'), | ||
4108 | 47 | ]) | ||
4109 | 48 | def test__derive_target_branch_string(same_remote_branch_names, | ||
4110 | 49 | different_remote_branch_names, expected | ||
4111 | 50 | ): | ||
4112 | 51 | remote_branch_objects = [] | ||
4113 | 52 | for branch_name in same_remote_branch_names: | ||
4114 | 53 | b = unittest.mock.Mock() | ||
4115 | 54 | b.peel(pygit2.Tree).id = unittest.mock.sentinel.same_id | ||
4116 | 55 | b.branch_name = branch_name | ||
4117 | 56 | remote_branch_objects.append(b) | ||
4118 | 57 | for branch_name in different_remote_branch_names: | ||
4119 | 58 | b = unittest.mock.Mock() | ||
4120 | 59 | b.peel(pygit2.Tree).id = object() # need a different sentinel for each | ||
4121 | 60 | b.branch_name = branch_name | ||
4122 | 61 | remote_branch_objects.append(b) | ||
4123 | 62 | target_branch_string = target._derive_target_branch_string( | ||
4124 | 63 | remote_branch_objects | ||
4125 | 64 | ) | ||
4126 | 65 | assert target_branch_string == expected | ||
4127 | 66 | |||
4128 | 67 | |||
4129 | 68 | @pytest.mark.parametrize('changelog_name, expected', [ | ||
4130 | 69 | ('test_versions_1', ['1.0', None]), | ||
4131 | 70 | ('test_versions_2', ['2.0', '1.0']), | ||
4132 | 71 | ('test_versions_3', ['4.0', '3.0']), | ||
4133 | 72 | ('test_versions_unknown', ['ss-970814-1', None]), | ||
4134 | 73 | ]) | ||
4135 | 74 | def test_changelog_versions(changelog_name, expected): | ||
4136 | 75 | test_changelog = get_test_changelog(changelog_name) | ||
4137 | 76 | assert [test_changelog.version, test_changelog.previous_version] == expected | ||
4138 | 77 | |||
4139 | 78 | |||
4140 | 79 | @pytest.mark.parametrize('changelog_name, expected', [ | ||
4141 | 80 | ('test_versions_unknown', ['ss-970814-1',]), | ||
4142 | 81 | ]) | ||
4143 | 82 | def test_changelog_all_versions(changelog_name, expected): | ||
4144 | 83 | test_changelog = get_test_changelog(changelog_name) | ||
4145 | 84 | assert test_changelog.all_versions == expected | ||
4146 | 85 | |||
4147 | 86 | |||
4148 | 87 | def test_changelog_distribution(): | ||
4149 | 88 | test_changelog = get_test_changelog('test_distribution') | ||
4150 | 89 | assert test_changelog.distribution == 'xenial' | ||
4151 | 90 | |||
4152 | 91 | |||
4153 | 92 | def test_changelog_date(): | ||
4154 | 93 | test_changelog = get_test_changelog('test_date_1') | ||
4155 | 94 | assert test_changelog.date == 'Mon, 12 May 2016 08:14:34 -0700' | ||
4156 | 95 | test_changelog = get_test_changelog('test_date_2') | ||
4157 | 96 | assert test_changelog.date == 'Mon, 12 May 2016 08:14:34 -0700' | ||
4158 | 97 | |||
4159 | 98 | |||
4160 | 99 | @pytest.mark.parametrize('changelog_name, expected', [ | ||
4161 | 100 | ('test_maintainer_1', 'Test Maintainer <test-maintainer@donotmail.com>'), | ||
4162 | 101 | ('test_maintainer_2', '<test-maintainer@donotmail.com>'), | ||
4163 | 102 | ]) | ||
4164 | 103 | def test_changelog_maintainer(changelog_name, expected): | ||
4165 | 104 | test_changelog = get_test_changelog(changelog_name) | ||
4166 | 105 | assert test_changelog.maintainer == expected | ||
4167 | 106 | |||
4168 | 107 | |||
4169 | 108 | def test_changelog_maintainer_invalid(): | ||
4170 | 109 | with pytest.raises(ValueError): | ||
4171 | 110 | test_changelog = get_test_changelog('test_maintainer_3') | ||
4172 | 111 | test_changelog.maintainer | ||
4173 | 112 | |||
4174 | 113 | |||
4175 | 114 | def test_changelog_multiple_angle_brackets(): | ||
4176 | 115 | """An email address with extra angle brackets should still parse""" | ||
4177 | 116 | test_changelog = get_test_changelog('test_multiple_angle_brackets') | ||
4178 | 117 | assert test_changelog.git_authorship()[1] == 'micah@debian.org' | ||
4179 | 118 | |||
4180 | 119 | |||
4181 | 120 | @pytest.mark.parametrize(['input_date_string', 'expected_result'], [ | ||
4182 | 121 | # The normal complete form | ||
4183 | 122 | ('Mon, 12 May 2016 08:14:34 -0700', (2016, 5, 12, 8, 14, 34, -7)), | ||
4184 | 123 | # Day of week missing, such as in: | ||
4185 | 124 | # datefudge 1.12 | ||
4186 | 125 | ('12 May 2016 08:14:34 -0700', (2016, 5, 12, 8, 14, 34, -7)), | ||
4187 | 126 | # Full (not abbreviated) month name, such as in: | ||
4188 | 127 | # dnsmasq 2.32-2 | ||
4189 | 128 | # dropbear 0.42-1 | ||
4190 | 129 | # e2fsprogs 1.42.11-1 | ||
4191 | 130 | # efibootmgr 0.5.4-7 | ||
4192 | 131 | # hunspell-br 0.11-1 | ||
4193 | 132 | # kubuntu-default-settings 1:6.06-22 | ||
4194 | 133 | # libvformat 1.13-4 | ||
4195 | 134 | ('12 June 2016 08:14:34 -0700', (2016, 6, 12, 8, 14, 34, -7)), | ||
4196 | 135 | # Full (not abbreviated) day of week name, such as in: | ||
4197 | 136 | # logcheck 1.2.22a | ||
4198 | 137 | ('Thursday, 15 May 2016 08:14:34 -0700', (2016, 5, 15, 8, 14, 34, -7)), | ||
4199 | 138 | # Part-abbreviated day of week name, such as in: | ||
4200 | 139 | # kubuntu-meta 1.76 | ||
4201 | 140 | ('Thur, 15 May 2016 08:14:34 -0700', (2016, 5, 15, 8, 14, 34, -7)), | ||
4202 | 141 | ]) | ||
4203 | 142 | def test_parse_changelog_date(input_date_string, expected_result): | ||
4204 | 143 | """_parse_changelog_date should parse a basic date string correctly | ||
4205 | 144 | |||
4206 | 145 | :param str input_date_string: the timestamp part of the changelog signoff | ||
4207 | 146 | line | ||
4208 | 147 | :param tuple(int, int, int, int, int, int, int) expected_result: the | ||
4209 | 148 | expected parse result in (year, month, day, hour, minute, second, | ||
4210 | 149 | timezone_offset_in_hours) form. The actual expected result needs to be | ||
4211 | 150 | a datetime.datetime object; to avoid duplication in test parameters | ||
4212 | 151 | this will be instantiated within the test. | ||
4213 | 152 | """ | ||
4214 | 153 | actual_result = target.Changelog._parse_changelog_date(input_date_string) | ||
4215 | 154 | expected_result_datetime = datetime.datetime( | ||
4216 | 155 | *expected_result[:6], | ||
4217 | 156 | tzinfo=datetime.timezone(datetime.timedelta(hours=expected_result[6])), | ||
4218 | 157 | ) | ||
4219 | 158 | assert actual_result == expected_result_datetime | ||
4220 | 159 | |||
4221 | 160 | |||
4222 | 161 | @pytest.mark.parametrize(['input_date_string'], [ | ||
4223 | 162 | ('Mon, 30 Feb 2020 15:50:58 +0200',), # ghostscript 9.50~dfsg-5ubuntu4 | ||
4224 | 163 | ('Mon, 03 Sep 2018 00:43:25 -7000',), # lxqt-config 0.13.0-0ubuntu4 | ||
4225 | 164 | ('Tue, 17 May 2008 10:93:55 -0500',), # iscsitarget | ||
4226 | 165 | # 0.4.15+svn148-2.1ubuntu1 | ||
4227 | 166 | ('Monu, 22 Jan 2007 22:10:50 -0500',), # mail-spf-perl 2.004-0ubuntu1 | ||
4228 | 167 | ('Wed, 29 Augl 2007 16:14:11 +0200',), # nut 2.2.0-2 | ||
4229 | 168 | ]) | ||
4230 | 169 | def test_changelog_date_parse_errors(input_date_string): | ||
4231 | 170 | """_parse_changelog_date should raise ValueError on illegal dates | ||
4232 | 171 | |||
4233 | 172 | :param str input_date_string: the timestamp part of the changelog signoff | ||
4234 | 173 | line | ||
4235 | 174 | """ | ||
4236 | 175 | with pytest.raises(ValueError): | ||
4237 | 176 | target.Changelog._parse_changelog_date(input_date_string) | ||
4238 | 177 | |||
4239 | 178 | |||
4240 | 179 | @pytest.mark.parametrize( | ||
4241 | 180 | 'changelog_name, name, email, epoch_seconds, offset', [ | ||
4242 | 181 | ( | ||
4243 | 182 | 'test_maintainer_1', | ||
4244 | 183 | 'Test Maintainer', | ||
4245 | 184 | 'test-maintainer@donotmail.com', | ||
4246 | 185 | 0, | ||
4247 | 186 | 0, | ||
4248 | 187 | ), | ||
4249 | 188 | ( | ||
4250 | 189 | 'test_maintainer_2', | ||
4251 | 190 | 'Unnamed', # git won't handle empty names; see the spec | ||
4252 | 191 | 'test-maintainer@donotmail.com', | ||
4253 | 192 | 0, | ||
4254 | 193 | 0, | ||
4255 | 194 | ), | ||
4256 | 195 | ( | ||
4257 | 196 | 'test_date_1', | ||
4258 | 197 | 'Test Maintainer', | ||
4259 | 198 | 'test-maintainer@donotmail.com', | ||
4260 | 199 | 1463066074, | ||
4261 | 200 | -420, | ||
4262 | 201 | ), | ||
4263 | 202 | ( | ||
4264 | 203 | 'test_date_2', | ||
4265 | 204 | 'Test Maintainer', | ||
4266 | 205 | 'test-maintainer@donotmail.com', | ||
4267 | 206 | 1463066074, | ||
4268 | 207 | -420, | ||
4269 | 208 | ), | ||
4270 | 209 | ( | ||
4271 | 210 | 'maintainer_name_leading_space', | ||
4272 | 211 | 'Test Maintainer', | ||
4273 | 212 | 'test-maintainer@example.com', | ||
4274 | 213 | 0, | ||
4275 | 214 | 0, | ||
4276 | 215 | ), | ||
4277 | 216 | ( | ||
4278 | 217 | 'maintainer_name_trailing_space', | ||
4279 | 218 | 'Test Maintainer', | ||
4280 | 219 | 'test-maintainer@example.com', | ||
4281 | 220 | 0, | ||
4282 | 221 | 0, | ||
4283 | 222 | ), | ||
4284 | 223 | ( | ||
4285 | 224 | 'maintainer_name_inner_space', | ||
4286 | 225 | 'Test Maintainer', | ||
4287 | 226 | 'test-maintainer@example.com', | ||
4288 | 227 | 0, | ||
4289 | 228 | 0, | ||
4290 | 229 | ), | ||
4291 | 230 | ]) | ||
4292 | 231 | def test_changelog_authorship( | ||
4293 | 232 | changelog_name, | ||
4294 | 233 | name, | ||
4295 | 234 | email, | ||
4296 | 235 | epoch_seconds, | ||
4297 | 236 | offset, | ||
4298 | 237 | ): | ||
4299 | 238 | result = get_test_changelog(changelog_name).git_authorship() | ||
4300 | 239 | assert result == (name, email, epoch_seconds, offset) | ||
4301 | 240 | |||
4302 | 241 | |||
4303 | 242 | def test_changelog_utf8(): | ||
4304 | 243 | test_changelog = get_test_changelog('test_utf8_error') | ||
4305 | 244 | assert test_changelog.version == '1.0.3-2' | ||
4306 | 245 | |||
4307 | 246 | |||
def test_changelog_duplicate():
    """Changelog.all_versions must return without raising on duplicates.

    Xenial's dpkg-parsechangelog eliminates duplicate versions. Bionic's
    dpkg-parsechangelog does not. We rely on the behaviour of
    dpkg-parsechangelog from Bionic, where this test passes. The test fails
    when using Xenial's dpkg-parsechangelog, where its behaviour doesn't
    match our assumptions elsewhere.

    -with-extra includes an extra changelog entry at the end. This is
    currently needed to trip the assertion because it truncates the longer
    list before its comparison. This will get fixed in a subsequent commit,
    but using it here ensures that this test will correctly trip regardless
    of the presence of that unrelated bug.
    """
    # Merely evaluating the property must not trip the internal assertion.
    get_test_changelog('duplicate-version-with-extra').all_versions
4324 | 263 | |||
4325 | 264 | |||
def test_changelog_all_versions_assertion_mismatched_length():
    """all_versions raises ChangelogError on mismatched version lists.

    If Changelog.all_versions finds that self._changelog.versions
    mismatches self._shell_all_versions, it is supposed to raise. This
    covers an edge case (lists of differing lengths) where at one point in
    development it did not; both sides are faked to reproduce the mismatch.
    """
    patcher = unittest.mock.patch(
        'gitubuntu.git_repository.Changelog._shell_all_versions',
        new_callable=unittest.mock.PropertyMock,
    )
    with patcher as mock_shell_all_versions:
        mock_shell_all_versions.return_value = ['a']
        changelog = target.Changelog(b'')
        changelog._changelog = unittest.mock.Mock()
        changelog._changelog.versions = ['a', 'b']
        with pytest.raises(target.ChangelogError):
            changelog.all_versions
4342 | 281 | |||
4343 | 282 | |||
@pytest.mark.parametrize('tree_func', [
    # Each tree_func accepts a mock Blob that is to represent the changelog
    # blob itself and returns a mock Tree with the mock Blob embedded
    # somewhere within it. The test then checks that
    # follow_symlinks_to_blob can correctly find the changelog Blob given
    # the Tree.

    # Of course this is only expected to work if, after checking out the
    # Tree, "cat debian/changelog" would work. But this allows us to test
    # the various permutations of symlink following in Trees that _are_
    # valid.

    # Simple case
    lambda b: Tree({
        'debian': Tree({'changelog': b}),
    }),

    # Symlink in debian/
    lambda b: Tree({
        'debian': Tree({
            'changelog.real': b,
            'changelog': Symlink('changelog.real'),
        }),
    }),

    # Symlink to parent directory
    lambda b: Tree({
        'changelog': b,
        'debian': Tree({
            'changelog': Symlink('../changelog'),
        })
    }),

    # Symlink to subdirectory
    lambda b: Tree({
        'debian': Tree({
            'changelog': Symlink('subdirectory/changelog'),
            'subdirectory': Tree({'changelog': b}),
        })
    }),

    # debian/ itself is a symlink to a different directory
    lambda b: Tree({
        'pkg': Tree({'changelog': b}),
        'debian': Symlink('pkg'),
    })
])
def test_follow_symlinks_to_blob(pygit2_repo, tree_func):
    """follow_symlinks_to_blob locates the changelog blob via symlinks."""
    changelog_blob = Blob(b'')
    expected_id = changelog_blob.write(pygit2_repo)
    pygit2_tree = pygit2_repo.get(
        tree_func(changelog_blob).write(pygit2_repo)
    )
    found = target.follow_symlinks_to_blob(
        pygit2_repo,
        pygit2_tree,
        'debian/changelog',
    )
    assert found.id == expected_id
4400 | 339 | |||
4401 | 340 | |||
@pytest.mark.parametrize('tree', [
    Tree({}),
    Tree({'debian': Tree({})}),
    Tree({'debian': Tree({'changelog': Symlink('other')})}),
    Tree({'debian': Tree({'changelog': Symlink('../other')})}),
])
def test_follow_symlinks_to_blob_not_found(pygit2_repo, tree):
    """A missing or dangling debian/changelog raises KeyError."""
    written_tree = pygit2_repo.get(tree.write(pygit2_repo))
    with pytest.raises(KeyError):
        target.follow_symlinks_to_blob(
            pygit2_repo,
            written_tree,
            'debian/changelog',
        )
4416 | 355 | |||
4417 | 356 | |||
def test_renameable_dir_basename(tmpdir):
    """basename reports the final component of the wrapped path."""
    path = tmpdir.join('foo')
    path.ensure()
    assert target.RenameableDir(str(path)).basename == 'foo'
4423 | 362 | |||
4424 | 363 | |||
def test_renameable_dir_basename_setter(tmpdir):
    """Assigning basename renames the entry on disk as well."""
    path = tmpdir.join('foo')
    path.ensure()
    renameable = target.RenameableDir(str(path))
    renameable.basename = 'bar'
    assert renameable.basename == 'bar'
    # The rename must be visible on the filesystem, not just in the object.
    assert tmpdir.join('bar').check()
4432 | 371 | |||
4433 | 372 | |||
def test_dot_git_match(tmpdir):
    """_dot_git_match selects '.git' and escaped variants only."""
    for entry_name in ['.git', 'git', '..git', 'other']:
        tmpdir.join(entry_name).ensure()

    matched = {
        entry.basename
        for entry in tmpdir.listdir(
            fil=lambda entry: target._dot_git_match(str(entry.basename))
        )
    }
    assert matched == {'.git', '..git'}
4445 | 384 | |||
4446 | 385 | |||
def test_renameable_dir_listdir(tmpdir):
    """listdir applies the filter and wraps entries as RenameableDirs."""
    for entry_name in ['.git', 'git', '..git', 'other']:
        tmpdir.join(entry_name).ensure()
    listing = set(
        target.RenameableDir(str(tmpdir)).listdir(target._dot_git_match)
    )
    assert listing == {
        target.RenameableDir(os.path.join(str(tmpdir), '.git')),
        target.RenameableDir(os.path.join(str(tmpdir), '..git')),
    }
4456 | 395 | |||
4457 | 396 | |||
def test_renamable_dir_recursive(tmpdir):
    """recursive is true for a directory and false for a plain file."""
    directory = tmpdir.join('foo')
    directory.ensure_dir()
    plain_file = tmpdir.join('bar')
    plain_file.ensure()
    assert target.RenameableDir(str(directory)).recursive
    assert not target.RenameableDir(str(plain_file)).recursive
4465 | 404 | |||
4466 | 405 | |||
def test_renameable_dir_recursive_symlink_directory(tmpdir):
    """A RenameableDir should not treat a broken symlink as recursive"""
    dangling_link = tmpdir.join('foo')
    # Point the symlink at a path that does not exist.
    dangling_link.mksymlinkto(tmpdir.join('nonexistent_file'))
    assert not target.RenameableDir(str(dangling_link)).recursive
4473 | 412 | |||
4474 | 413 | |||
def test_renameable_dir_str(tmpdir):
    """str() of a RenameableDir yields the full path."""
    path = tmpdir.join('foo')
    path.ensure()
    expected = os.path.join(str(tmpdir), 'foo')
    assert str(target.RenameableDir(str(path))) == expected
4480 | 419 | |||
4481 | 420 | |||
def test_renameable_dir_repr(tmpdir):
    """repr() shows the class name and the full path."""
    path = tmpdir.join('foo')
    path.ensure()
    expected = "RenameableDir('%s/foo')" % str(tmpdir)
    assert repr(target.RenameableDir(str(path))) == expected
4487 | 426 | |||
4488 | 427 | |||
def test_renameable_dir_hash_eq(tmpdir):
    """Equality follows the path: same path equal, different path not."""
    first = tmpdir.join('foo')
    same_as_first = tmpdir.join('foo')
    different = tmpdir.join('bar')

    first.ensure()
    different.ensure()

    renameable_first = target.RenameableDir(str(first))
    renameable_same = target.RenameableDir(str(same_as_first))
    renameable_different = target.RenameableDir(str(different))

    assert renameable_first == renameable_same
    assert renameable_first != renameable_different
4503 | 442 | |||
4504 | 443 | |||
def test_renameable_dir_must_exist(tmpdir):
    """A RenameableDir should reject a path that doesn't exist"""
    nonexistent = tmpdir.join('a')
    with pytest.raises(FileNotFoundError):
        target.RenameableDir(nonexistent)
4509 | 448 | |||
4510 | 449 | |||
def test_fake_renameable_dir_basename():
    """A FakeRenameableDir exposes the name it was constructed with."""
    fake = target.FakeRenameableDir('foo', None)
    assert fake.basename == 'foo'
4514 | 453 | |||
4515 | 454 | |||
def test_fake_renameable_dir_basename_setter():
    """basename on a FakeRenameableDir is writable."""
    fake = target.FakeRenameableDir('foo', None)
    fake.basename = 'bar'
    assert fake.basename == 'bar'
4520 | 459 | |||
4521 | 460 | |||
def test_fake_renameable_dir_listdir():
    """listdir on a fake honours the fil= filter over its children."""
    children = [
        target.FakeRenameableDir(child_name, None)
        for child_name in ['.git', 'git', '..git', 'other']
    ]
    parent = target.FakeRenameableDir(None, children)
    matched = {
        child.basename
        for child in parent.listdir(fil=target._dot_git_match)
    }
    assert matched == {'.git', '..git'}
4531 | 470 | |||
4532 | 471 | |||
def test_fake_renameable_dir_recursive():
    """A fake with a (possibly empty) child list is recursive; None is not."""
    assert target.FakeRenameableDir(['foo'], []).recursive
    assert not target.FakeRenameableDir(['foo'], None).recursive
4536 | 475 | |||
4537 | 476 | |||
def test_fake_renameable_dir_hash_eq():
    """Structurally distinct fakes compare unequal; each equals itself."""
    variations = [
        target.FakeRenameableDir(None, None),
        target.FakeRenameableDir(None, []),
        target.FakeRenameableDir('foo', []),
        target.FakeRenameableDir(None, [
            target.FakeRenameableDir('foo', None)]
        ),
        target.FakeRenameableDir(None, [
            target.FakeRenameableDir('foo', [
                target.FakeRenameableDir('bar', None)
            ]),
        ]),
    ]
    # Compare every ordered pair, including each variation against itself.
    for left, right in itertools.product(variations, repeat=2):
        if left is right:
            assert left == right
        else:
            assert left != right
4557 | 496 | |||
4558 | 497 | |||
def test_fake_renameable_dir_repr():
    """repr() nests child reprs inside the parent's repr."""
    fake = target.FakeRenameableDir(
        'foo',
        [target.FakeRenameableDir('bar', [])],
    )
    expected = "FakeRenameableDir('foo', [FakeRenameableDir('bar', [])])"
    assert repr(fake) == expected
4564 | 503 | |||
4565 | 504 | |||
@pytest.mark.parametrize('initial,expected', [
    # Empty directory remains unchanged
    (
        target.FakeRenameableDir(None, []),
        target.FakeRenameableDir(None, []),
    ),
    # Basic .git -> ..git escape
    (
        target.FakeRenameableDir(
            None,
            [target.FakeRenameableDir('.git', None)],
        ),
        target.FakeRenameableDir(
            None,
            [target.FakeRenameableDir('..git', None)],
        ),
    ),
    # .git contains a .git
    (
        target.FakeRenameableDir(
            None,
            [
                target.FakeRenameableDir(
                    '.git',
                    [target.FakeRenameableDir('.git', None)],
                )
            ],
        ),
        target.FakeRenameableDir(
            None,
            [
                target.FakeRenameableDir(
                    '..git',
                    [target.FakeRenameableDir('..git', None)],
                )
            ],
        ),
    ),
    # git remains unchanged
    (
        target.FakeRenameableDir(
            None,
            [target.FakeRenameableDir('git', None)],
        ),
        target.FakeRenameableDir(
            None,
            [target.FakeRenameableDir('git', None)],
        ),
    ),
    # .git and ..git both exist
    (
        target.FakeRenameableDir(
            None,
            [
                target.FakeRenameableDir('.git', None),
                target.FakeRenameableDir('..git', None),
            ],
        ),
        target.FakeRenameableDir(
            None,
            [
                target.FakeRenameableDir('..git', None),
                target.FakeRenameableDir('...git', None),
            ],
        ),
    ),
    # Ordinary directory contains a .git
    (
        target.FakeRenameableDir(
            None,
            [
                target.FakeRenameableDir(
                    'foo',
                    [target.FakeRenameableDir('.git', None)],
                )
            ]
        ),
        target.FakeRenameableDir(
            None,
            [
                target.FakeRenameableDir(
                    'foo',
                    [target.FakeRenameableDir('..git', None)],
                )
            ]
        ),
    ),
])
def test_escape_dot_git(initial, expected):
    """_escape_unescape_dot_git round-trips a tree of FakeRenameableDirs.

    Escaping the ``initial`` tree must yield ``expected``; unescaping the
    result must restore ``initial`` exactly.
    """
    state = copy.deepcopy(initial)
    # Once escaped, we should get to what was expected
    target._escape_unescape_dot_git(state, target._EscapeDirection.ESCAPE)
    assert state == expected
    # Once unescaped, we should get back to where we started since the escaping
    # mechanism is lossless.
    target._escape_unescape_dot_git(state, target._EscapeDirection.UNESCAPE)
    assert state == initial
4663 | 602 | |||
4664 | 603 | |||
def test_unescape_dot_git_raises():
    """Test that unescaping something with '.git' raises an exception."""
    dot_git_tree = target.FakeRenameableDir(
        None,
        [target.FakeRenameableDir('.git', None)],
    )
    with pytest.raises(RuntimeError):
        target._escape_unescape_dot_git(
            dot_git_tree,
            direction=target._EscapeDirection.UNESCAPE,
        )
4675 | 614 | |||
4676 | 615 | |||
@pytest.mark.parametrize('direction', [
    target._EscapeDirection.ESCAPE,
    target._EscapeDirection.UNESCAPE,
])
def test_escape_dot_git_ordering(direction):
    """Test that renames happen in the correct order.

    ...git -> ....git must happen before ..git -> ...git to avoid a collision,
    and vice versa in the unescape case.
    """
    # Avoid '.git' as it isn't valid in the reverse direction
    two_dots = target.FakeRenameableDir('..git', None)
    three_dots = target.FakeRenameableDir('...git', None)
    inputs = [two_dots, three_dots]
    if direction is target._EscapeDirection.ESCAPE:
        expected_order = [three_dots, two_dots]
    else:
        expected_order = [two_dots, three_dots]
    # The recorded rename order must match regardless of input ordering.
    for given_order in [inputs, reversed(inputs)]:
        top = target.FakeRenameableDir(None, given_order)
        target._escape_unescape_dot_git(top, direction)
        for recorded, wanted in zip(top._rename_record, expected_order):
            assert recorded is wanted
4699 | 638 | |||
4700 | 639 | |||
def test_empty_dir_to_tree(pygit2_repo, tmpdir):
    """An empty directory converts to the empty git tree."""
    expected = str(Tree({}).write(pygit2_repo))
    actual = target.GitUbuntuRepository.dir_to_tree(
        pygit2_repo,
        str(tmpdir),
    )
    assert actual == expected
4707 | 646 | |||
4708 | 647 | |||
def test_onefile_dir_to_tree(pygit2_repo, tmpdir):
    """A directory with one file converts to the matching one-blob tree."""
    tmpdir.join('foo').write('bar')
    expected = str(Tree({'foo': Blob(b'bar')}).write(pygit2_repo))
    actual = target.GitUbuntuRepository.dir_to_tree(
        pygit2_repo,
        str(tmpdir),
    )
    assert actual == expected
4716 | 655 | |||
4717 | 656 | |||
def test_git_escape_dir_to_tree(pygit2_repo, tmpdir):
    """With escape=True a '.git' entry is stored as '..git' in the tree."""
    tmpdir.mkdir('.git')
    expected = str(Tree({'..git': Tree({})}).write(pygit2_repo))
    actual = target.GitUbuntuRepository.dir_to_tree(
        pygit2_repo,
        str(tmpdir),
        escape=True,
    )
    assert actual == expected
4726 | 665 | |||
4727 | 666 | |||
@pytest.mark.parametrize('tree_data,expected_path', [
    # Empty tree -> default
    (Tree({}), 'debian/patches/series'),

    # Empty debian/patches directory -> default
    (Tree({'debian': Tree({'patches': Tree({})})}), 'debian/patches/series'),

    # Only debian/patches/series -> that one
    (
        Tree({'debian': Tree({'patches': Tree({'series': Blob(b'')})})}),
        'debian/patches/series',
    ),

    # Only debian/patches/debian.series -> that one
    (
        Tree({'debian': Tree({'patches': Tree({
            'debian.series': Blob(b'')
        })})}),
        'debian/patches/debian.series',
    ),

    # Both -> debian.series
    (
        Tree({'debian': Tree({'patches': Tree({
            'debian.series': Blob(b''),
            'series': Blob(b''),
        })})}),
        'debian/patches/debian.series',
    ),
])
def test_determine_quilt_series_path(pygit2_repo, tree_data, expected_path):
    """determine_quilt_series_path prefers debian.series over series and
    falls back to the default 'debian/patches/series' when neither exists.
    """
    tree_obj = pygit2_repo.get(tree_data.write(pygit2_repo))
    path = target.determine_quilt_series_path(pygit2_repo, tree_obj)
    assert path == expected_path
4762 | 701 | |||
4763 | 702 | |||
def test_quilt_env(pygit2_repo):
    """quilt_env produces exactly the expected quilt settings dict."""
    series_tree = Tree({
        'debian': Tree({'patches': Tree({'debian.series': Blob(b'')})}),
    })
    tree_obj = pygit2_repo.get(series_tree.write(pygit2_repo))
    assert target.quilt_env(pygit2_repo, tree_obj) == {
        'EDITOR': 'true',
        'QUILT_NO_DIFF_INDEX': '1',
        'QUILT_NO_DIFF_TIMESTAMPS': '1',
        'QUILT_PATCHES': 'debian/patches',
        'QUILT_SERIES': 'debian/patches/debian.series',
    }
4777 | 716 | |||
4778 | 717 | |||
def test_repo_quilt_env(repo):
    """repo.quilt_env contains the quilt settings plus the usual env."""
    series_tree = Tree({
        'debian': Tree({'patches': Tree({'debian.series': Blob(b'')})}),
    })
    tree_obj = repo.raw_repo.get(series_tree.write(repo.raw_repo))
    env = repo.quilt_env(tree_obj)

    quilt_settings = {
        'EDITOR': 'true',
        'QUILT_NO_DIFF_INDEX': '1',
        'QUILT_NO_DIFF_TIMESTAMPS': '1',
        'QUILT_PATCHES': 'debian/patches',
        'QUILT_SERIES': 'debian/patches/debian.series',
    }
    for key, value in quilt_settings.items():
        assert env[key] == value

    # In addition to the settings above, check that
    # GitUbuntuRepository.quilt_env has correctly merged in the usual
    # environment. Testing that a few keys that we expect to be set are set
    # should suffice.
    for key in ['HOME', 'GIT_DIR', 'GIT_WORK_TREE']:
        assert env[key]
4802 | 741 | |||
4803 | 742 | |||
def test_repo_quilt_env_from_treeish_str(repo):
    """quilt_env_from_treeish_str resolves a treeish string to the same
    quilt settings as quilt_env does for a tree object."""
    series_tree = Tree({
        'debian': Tree({'patches': Tree({'debian.series': Blob(b'')})}),
    })
    tree_obj = repo.raw_repo.get(series_tree.write(repo.raw_repo))
    env = repo.quilt_env_from_treeish_str(str(tree_obj.id))

    quilt_settings = {
        'EDITOR': 'true',
        'QUILT_NO_DIFF_INDEX': '1',
        'QUILT_NO_DIFF_TIMESTAMPS': '1',
        'QUILT_PATCHES': 'debian/patches',
        'QUILT_SERIES': 'debian/patches/debian.series',
    }
    for key, value in quilt_settings.items():
        assert env[key] == value
4819 | 758 | |||
4820 | 759 | |||
def test_repo_derive_env_change(repo):
    """Mutating the dict returned by repo.env must not affect repo.env.

    While this may stretch a little further than a normal instance
    property, it's worth enforcing as this particular attribute is at
    particular risk due to how it tends to be used.
    """
    mutated = repo.env
    mutated[unittest.mock.sentinel.k] = unittest.mock.sentinel.v
    # A fresh read of repo.env must not see the mutation.
    assert unittest.mock.sentinel.k not in repo.env
4830 | 769 | |||
4831 | 770 | |||
@pytest.mark.parametrize(
    'description, input_data, old_ubuntu, new_debian, expected',
    [
        (
            'Common case',
            Repo(
                commits=[
                    Commit.from_spec(
                        name='old/debian'
                    ),
                    Commit.from_spec(
                        parents=[Placeholder('old/debian')],
                        name='old/ubuntu',
                        changelog_versions=['1-1ubuntu1', '1-1'],
                    ),
                    Commit.from_spec(
                        parents=[Placeholder('old/debian')],
                        name='new/debian',
                        changelog_versions=['2-1', '1-1'],
                    ),
                ],
                tags={
                    'pkg/import/1-1': Placeholder('old/debian'),
                    'pkg/import/1-1ubuntu1': Placeholder('old/ubuntu'),
                    'pkg/import/2-1': Placeholder('new/debian'),
                },
            ),
            'pkg/import/1-1ubuntu1',
            'pkg/import/2-1',
            'pkg/import/1-1',
        ),
        (
            'Ubuntu delta based on a NMU',
            Repo(
                commits=[
                    Commit.from_spec(
                        name='fork_point'
                    ),
                    Commit.from_spec(
                        parents=[Placeholder('fork_point')],
                        name='old/debian',
                        changelog_versions=['1-1.1', '1-1'],
                    ),
                    Commit.from_spec(
                        parents=[Placeholder('old/debian')],
                        name='old/ubuntu',
                        changelog_versions=['1-1.1ubuntu1', '1-1.1', '1-1'],
                    ),
                    Commit.from_spec(
                        parents=[Placeholder('fork_point')],
                        name='new/debian',
                        changelog_versions=['2-1', '1-1'],
                    ),
                ],
                tags={
                    'pkg/import/1-1': Placeholder('fork_point'),
                    'pkg/import/1-1.1': Placeholder('old/debian'),
                    'pkg/import/1-1.1ubuntu1': Placeholder('old/ubuntu'),
                    'pkg/import/2-1': Placeholder('new/debian'),
                },
            ),
            'pkg/import/1-1.1ubuntu1',
            'pkg/import/2-1',
            'pkg/import/1-1.1',
        ),
        (
            'Ubuntu upstream version head of Debian',
            Repo(
                commits=[
                    Commit.from_spec(
                        name='old/debian'
                    ),
                    Commit.from_spec(
                        parents=[Placeholder('old/debian')],
                        name='mid_ubuntu',
                        changelog_versions=['1-1ubuntu1', '1-1'],
                    ),
                    Commit.from_spec(
                        parents=[Placeholder('mid_ubuntu')],
                        name='old/ubuntu',
                        changelog_versions=['2-0ubuntu1', '1-1ubuntu1', '1-1'],
                    ),
                    Commit.from_spec(
                        parents=[Placeholder('old/debian')],
                        name='new/debian',
                        changelog_versions=['3-1', '1-1'],
                    ),
                ],
                tags={
                    'pkg/import/1-1': Placeholder('old/debian'),
                    'pkg/import/1-1ubuntu1': Placeholder('mid_ubuntu'),
                    'pkg/import/2-0ubuntu1': Placeholder('old/ubuntu'),
                    'pkg/import/3-1': Placeholder('new/debian'),
                },
            ),
            'pkg/import/2-0ubuntu1',
            'pkg/import/3-1',
            'pkg/import/1-1',
        ),
    ],
)
def test_repo_find_ubuntu_merge(
    description,
    repo,
    input_data,
    old_ubuntu,
    new_debian,
    expected,
):
    """find_ubuntu_merge_base resolves to the expected import tag.

    Each case builds a synthetic repository (``input_data``) and checks
    that the merge base computed for ``old_ubuntu`` peels to the same
    commit as ``expected``.
    """
    # NOTE(review): new_debian is accepted but unused by the body; it
    # appears to document the merge scenario alongside description.
    input_data.write(repo.raw_repo)
    merge_base = repo.find_ubuntu_merge_base(old_ubuntu)

    assert merge_base

    # Compare resolved commit ids rather than refnames, since the merge
    # base and the expected tag may name the same commit differently.
    assert str(
        repo.get_commitish(merge_base).peel(pygit2.Commit).id
    ) == str(
        repo.get_commitish(expected).peel(pygit2.Commit).id
    )
4952 | 891 | |||
def test_repo_does_cleanup():
    """With delete_on_close=True, close() removes the repository path."""
    path = tempfile.mkdtemp()
    try:
        repository = target.GitUbuntuRepository(
            path,
            delete_on_close=True,
        )
        repository.close()
        assert not os.path.exists(path)
    finally:
        # Best-effort cleanup in case close() did not remove the path.
        shutil.rmtree(path, ignore_errors=True)
4964 | 903 | |||
4965 | 904 | |||
def test_repo_does_not_cleanup():
    """With delete_on_close=False, close() leaves the path in place."""
    path = tempfile.mkdtemp()
    try:
        repository = target.GitUbuntuRepository(
            path,
            delete_on_close=False,
        )
        repository.close()
        assert os.path.exists(path)
    finally:
        shutil.rmtree(path, ignore_errors=True)
4977 | 916 | |||
4978 | 917 | |||
4979 | 918 | @pytest.mark.parametrize( | ||
4980 | 919 | [ | ||
4981 | 920 | 'year', | ||
4982 | 921 | 'month', | ||
4983 | 922 | 'day', | ||
4984 | 923 | 'hours', | ||
4985 | 924 | 'minutes', | ||
4986 | 925 | 'seconds', | ||
4987 | 926 | 'milliseconds', | ||
4988 | 927 | 'hour_delta', | ||
4989 | 928 | 'expected', | ||
4990 | 929 | ], [ | ||
4991 | 930 | (1970, 1, 1, 0, 0, 0, 0, 0, (0, 0)), | ||
4992 | 931 | (1970, 1, 1, 0, 0, 0, 600, 0, (0, 0)), | ||
4993 | 932 | (1970, 1, 1, 1, 0, 0, 0, 1, (0, 60)), | ||
4994 | 933 | (1970, 1, 1, 0, 0, 0, 0, -1, (3600, -60)), | ||
4995 | 934 | (1971, 2, 3, 4, 5, 6, 7, -8, (34430706, -480)), | ||
4996 | 935 | ] | ||
4997 | 936 | ) | ||
4998 | 937 | def test_datetime_to_signature_spec( | ||
4999 | 938 | year, | ||
5000 | 939 | month, |
PASSED: Continuous integration, rev:7f25e4872b2d134925d3c6768be7ece34e24f112
https://jenkins.ubuntu.com/server/job/git-ubuntu-ci/63/
Executed test runs:
SUCCESS: VM Setup
SUCCESS: Build
SUCCESS: VM Reset
SUCCESS: Unit Tests
IN_PROGRESS: Declarative: Post Actions
Click here to trigger a rebuild:
https://jenkins.ubuntu.com/server/job/git-ubuntu-ci/63/rebuild