Merge ~racb/git-ubuntu:prepare-upload-adjustments into git-ubuntu:master

Proposed by Robie Basak
Status: Superseded
Proposed branch: ~racb/git-ubuntu:prepare-upload-adjustments
Merge into: git-ubuntu:master
Diff against target: 19208 lines (+18908/-0) (has conflicts)
50 files modified
doc/README.md (+117/-0)
doc/SPECIFICATION (+167/-0)
doc/release-process.md (+264/-0)
gitubuntu/changelog_date_overrides.txt (+19/-0)
gitubuntu/changelog_tests/maintainer_name_inner_space (+8/-0)
gitubuntu/changelog_tests/maintainer_name_leading_space (+8/-0)
gitubuntu/changelog_tests/maintainer_name_trailing_space (+8/-0)
gitubuntu/changelog_tests/test_date_1 (+8/-0)
gitubuntu/changelog_tests/test_date_2 (+8/-0)
gitubuntu/changelog_tests/test_distribution (+8/-0)
gitubuntu/changelog_tests/test_distribution_source_1 (+8/-0)
gitubuntu/changelog_tests/test_distribution_source_2 (+8/-0)
gitubuntu/changelog_tests/test_distribution_source_3 (+8/-0)
gitubuntu/changelog_tests/test_distribution_source_4 (+8/-0)
gitubuntu/changelog_tests/test_maintainer_1 (+8/-0)
gitubuntu/changelog_tests/test_maintainer_2 (+8/-0)
gitubuntu/changelog_tests/test_maintainer_3 (+8/-0)
gitubuntu/changelog_tests/test_versions_1 (+8/-0)
gitubuntu/changelog_tests/test_versions_2 (+14/-0)
gitubuntu/changelog_tests/test_versions_3 (+26/-0)
gitubuntu/clone.py (+178/-0)
gitubuntu/git_repository.py (+3026/-0)
gitubuntu/git_repository_test.py (+1191/-0)
gitubuntu/importer.py (+2703/-0)
gitubuntu/importer_service.py (+916/-0)
gitubuntu/importer_service_broker.py (+178/-0)
gitubuntu/importer_service_poller.py (+239/-0)
gitubuntu/importer_service_poller_test.py (+66/-0)
gitubuntu/importer_service_worker.py (+311/-0)
gitubuntu/importer_test.py (+2288/-0)
gitubuntu/prepare_upload.py (+215/-0)
gitubuntu/prepare_upload_test.py (+268/-0)
gitubuntu/repo_builder.py (+450/-0)
gitubuntu/scriptutils.py (+226/-0)
gitubuntu/source-package-allowlist.txt (+2881/-0)
gitubuntu/source-package-denylist.txt (+56/-0)
gitubuntu/source_builder.py (+344/-0)
gitubuntu/source_information.py (+785/-0)
gitubuntu/source_information_test.py (+503/-0)
gitubuntu/submit.py (+252/-0)
man/man1/git-ubuntu-clone.1 (+68/-0)
man/man1/git-ubuntu-export-orig.1 (+63/-0)
man/man1/git-ubuntu-import.1 (+224/-0)
man/man1/git-ubuntu-merge.1 (+134/-0)
man/man1/git-ubuntu-queue.1 (+96/-0)
man/man1/git-ubuntu-remote.1 (+86/-0)
man/man1/git-ubuntu-submit.1 (+97/-0)
man/man1/git-ubuntu-tag.1 (+88/-0)
man/man1/git-ubuntu.1 (+217/-0)
setup.py (+40/-0)
Conflict in doc/README.md
Conflict in doc/SPECIFICATION
Conflict in doc/release-process.md
Conflict in gitubuntu/changelog_date_overrides.txt
Conflict in gitubuntu/changelog_tests/maintainer_name_inner_space
Conflict in gitubuntu/changelog_tests/maintainer_name_leading_space
Conflict in gitubuntu/changelog_tests/maintainer_name_trailing_space
Conflict in gitubuntu/changelog_tests/test_date_1
Conflict in gitubuntu/changelog_tests/test_date_2
Conflict in gitubuntu/changelog_tests/test_distribution
Conflict in gitubuntu/changelog_tests/test_distribution_source_1
Conflict in gitubuntu/changelog_tests/test_distribution_source_2
Conflict in gitubuntu/changelog_tests/test_distribution_source_3
Conflict in gitubuntu/changelog_tests/test_distribution_source_4
Conflict in gitubuntu/changelog_tests/test_maintainer_1
Conflict in gitubuntu/changelog_tests/test_maintainer_2
Conflict in gitubuntu/changelog_tests/test_maintainer_3
Conflict in gitubuntu/changelog_tests/test_versions_1
Conflict in gitubuntu/changelog_tests/test_versions_2
Conflict in gitubuntu/changelog_tests/test_versions_3
Conflict in gitubuntu/clone.py
Conflict in gitubuntu/git_repository.py
Conflict in gitubuntu/git_repository_test.py
Conflict in gitubuntu/importer.py
Conflict in gitubuntu/importer_service.py
Conflict in gitubuntu/importer_service_broker.py
Conflict in gitubuntu/importer_service_poller.py
Conflict in gitubuntu/importer_service_poller_test.py
Conflict in gitubuntu/importer_service_worker.py
Conflict in gitubuntu/importer_test.py
Conflict in gitubuntu/prepare_upload.py
Conflict in gitubuntu/prepare_upload_test.py
Conflict in gitubuntu/repo_builder.py
Conflict in gitubuntu/scriptutils.py
Conflict in gitubuntu/source-package-allowlist.txt
Conflict in gitubuntu/source-package-blacklist.txt
Conflict in gitubuntu/source-package-denylist.txt
Conflict in gitubuntu/source-package-whitelist.txt
Conflict in gitubuntu/source_builder.py
Conflict in gitubuntu/source_information.py
Conflict in gitubuntu/source_information_test.py
Conflict in gitubuntu/submit.py
Conflict in man/man1/git-ubuntu-clone.1
Conflict in man/man1/git-ubuntu-export-orig.1
Conflict in man/man1/git-ubuntu-import.1
Conflict in man/man1/git-ubuntu-merge.1
Conflict in man/man1/git-ubuntu-queue.1
Conflict in man/man1/git-ubuntu-remote.1
Conflict in man/man1/git-ubuntu-submit.1
Conflict in man/man1/git-ubuntu-tag.1
Conflict in man/man1/git-ubuntu.1
Conflict in setup.py
Reviewer                Review Type              Date Requested  Status
Athos Ribeiro           community                                Approve
Server Team CI bot      continuous-integration                   Approve
git-ubuntu developers                                            Pending
Review via email: mp+413881@code.launchpad.net

This proposal has been superseded by a proposal from 2023-05-25.

Commit message

Make Jenkins happy

Server Team CI bot (server-team-bot) wrote :

PASSED: Continuous integration, rev:7f25e4872b2d134925d3c6768be7ece34e24f112
https://jenkins.ubuntu.com/server/job/git-ubuntu-ci/63/
Executed test runs:
    SUCCESS: VM Setup
    SUCCESS: Build
    SUCCESS: VM Reset
    SUCCESS: Unit Tests
    IN_PROGRESS: Declarative: Post Actions

Click here to trigger a rebuild:
https://jenkins.ubuntu.com/server/job/git-ubuntu-ci/63//rebuild

review: Approve (continuous-integration)
Athos Ribeiro (athos-ribeiro) wrote :

LGTM!

At first I wondered if it would be a good idea to verify the contents of the "headers" dict in "cli_printargs", but then I realized it is already being done in "push" (even though that is being done through calls to "assert" and therefore we do rely on __debug__ being set to True).

review: Approve
Robie Basak (racb) wrote :

> At first I wondered if it would be a good idea to verify the contents of the "headers" dict in "cli_printargs", but then I realized it is already being done in "push"

Right - it's more tedious to test from something closer to the CLI interface, so I tend to test the inner bits more directly.

> (even though that is being done through calls to "assert" and therefore we do rely on __debug__ being set to True).

I'm not sure we're on the same page here. What I mean above is that I'm testing the contents of the "headers" dict in prepare_upload_test.py using tests that test the behaviour of the prepare_upload.py::push(). assert statements from there are the usual pattern for pytest-based test suites and test suites are expected to always run with asserts enabled.

There are separate assert statements in the code itself in prepare_upload.py::push(), but these are there to state (and runtime verify when asserts are enabled) invariants that would help fail earlier and more helpfully if there is a bug somewhere. But I intend to test every actual case from the test suite in prepare_upload_test.py. If there's something you spotted that you think isn't being tested from there, I'd like to add it!
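
To illustrate the distinction, a minimal sketch (the names, the push() signature and the header keys here are assumptions, not the real prepare_upload.py code):

    def push(repo_url, ref_name, commit_id):
        headers = {
            'Vcs-Git': repo_url,
            'Vcs-Git-Ref': ref_name,
            'Vcs-Git-Commit': commit_id,
        }
        # In-code assert: states an invariant so that a bug elsewhere fails
        # early and helpfully (only checked while asserts are enabled).
        assert all(headers.values())
        return headers

    def test_push_returns_expected_headers():
        # Test-suite assert: the usual pytest pattern; test suites always run
        # with asserts enabled, so this is the authoritative check.
        headers = push('https://example.com/repo', 'refs/heads/merge', 'abc123')
        assert headers == {
            'Vcs-Git': 'https://example.com/repo',
            'Vcs-Git-Ref': 'refs/heads/merge',
            'Vcs-Git-Commit': 'abc123',
        }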

Athos Ribeiro (athos-ribeiro) wrote :

> I'm not sure we're on the same page here. What I mean above is that I'm testing the contents of the "headers" dict in prepare_upload_test.py using tests that test the behaviour of the prepare_upload.py::push(). assert statements from there are the usual pattern for pytest-based test suites and test suites are expected to always run with asserts enabled.

I was referring to the assert calls in `gitubuntu/prepare_upload.py`, as you mentioned later in your reply. I was just wondering why they are not explicitly raising exceptions instead. The comments in that file do justify them though:

> # However, we don't know of any actual case when this might happen, so
> # these are assertions rather than fully UX-compliant error paths.

> If there's something you spotted that you think isn't being tested from there, I'd like to add it!

The only thing that came to mind was the regular expression to parse the git URL. Although it is based on the one present in `https://git.launchpad.net/launchpad/tree/lib/lp/app/validators/name.py`, it is slightly different since it allows the username to start with one of the allowed special characters. Parametrizing that test to include other valid username examples could prevent mistakes in future changes to that regex. However, this looks simple enough and perhaps the effort is just not worth it here.
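
For example, a parametrized test along these lines could pin down the accepted username forms (the regex and names below are hypothetical; the real pattern in prepare_upload.py differs):

    import re
    import pytest

    LP_GIT_SSH_URL = re.compile(
        r'git\+ssh://(?P<user>[a-z0-9+.-]+)@git\.launchpad\.net/(?P<path>.+)'
    )

    @pytest.mark.parametrize('user', ['racb', 'athos-ribeiro', '+plus', '0digit'])
    def test_username_is_parsed(user):
        url = 'git+ssh://%s@git.launchpad.net/~%s/ubuntu/+source/foo' % (user, user)
        match = LP_GIT_SSH_URL.fullmatch(url)
        assert match is not None
        assert match.group('user') == user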

This LGTM :)

1f99957... by Lena Voytek

Updates for inclusive naming

Edit filenames, variables, and comments to match inclusive naming
standards. The user experience will remain the same with this
update. However, when allowing and denying specific packages,
additions must be placed in source-package-allowlist.txt and
source-package-denylist.txt instead of source-package-whitelist.txt
and source-package-blacklist.txt.

Signed-off-by: Lena Voytek <email address hidden>

ef3b3a9... by Robie Basak

Merge remote-tracking branch 'lvoytek/master'

57e5776... by Robie Basak

Update maintainer email address

<email address hidden> no longer exists. Use
<email address hidden> instead.

002e452... by Robie Basak

Update email address to request an import

This is for the message printed when a repository is not found on "git
ubuntu clone".

Since <email address hidden> no longer exists, we'll use
<email address hidden> instead.

2f0e855... by Robie Basak

Update email address default used in tests

Since <email address hidden> no longer exists, we'll use
<email address hidden> instead. This shouldn't affect
production code since the repo_builder and source_builder modules are
only used in tests.

5aad111... by Robie Basak

Update default bot account name

usd-importer-bot is now renamed to git-ubuntu-bot, catching up with the
project's rename to git-ubuntu.

5b0868f... by Sergio Durigan Junior

Accept ref names containing plus sign

Currently, if the ref name contains a plus sign, git ubuntu will fail
due to the following assertion error:

Traceback (most recent call last):
  File "/snap/git-ubuntu/891/usr/bin/git-ubuntu", line 11, in <module>
    load_entry_point('gitubuntu==1.0', 'console_scripts', 'git-ubuntu')()
  File "/snap/git-ubuntu/891/usr/lib/python3/dist-packages/gitubuntu/__main__.py", line 270, in main
    sys.exit(args.func(args))
  File "/snap/git-ubuntu/891/usr/lib/python3/dist-packages/gitubuntu/prepare_upload.py", line 170, in cli_printargs
    headers = push(
  File "/snap/git-ubuntu/891/usr/lib/python3/dist-packages/gitubuntu/prepare_upload.py", line 118, in push
    assert gitubuntu.importer.VCS_GIT_REF_VALIDATION.fullmatch(ref.name)
AssertionError

However, branch names (which compose ref names) are allowed to contain
the plus sign. This commit expands the VCS_GIT_REF_VALIDATION regexp
to accept that.

FWIW, I triggered this assertion when I named my branch after the
Debian release I was merging (for the net-snmp package):

  merge-5.9.1+dfsg-4-kinetic
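
The fix amounts to adding one character to the allowed set, along these lines (the pattern shown is illustrative; the real VCS_GIT_REF_VALIDATION in gitubuntu/importer.py may differ):

    import re

    # Before (illustrative): '+' was missing from the character class, so
    # refs/heads/merge-5.9.1+dfsg-4-kinetic failed the fullmatch assertion.
    # After: '+' is included.
    VCS_GIT_REF_VALIDATION = re.compile(r'refs/[A-Za-z0-9/_.+-]+')

    assert VCS_GIT_REF_VALIDATION.fullmatch('refs/heads/merge-5.9.1+dfsg-4-kinetic')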

f5dc43c... by Robie Basak

Clean up get_head_versions()

This method had no tests, and returning a pygit2.Branch object made it
harder to supply test data to other functions that accept data in the
structure returned by this method.

In practice, callers only need the version, commit time and commit hash
of each branch head, so return only exactly this, and adjust all
callers. This should not change any behaviour.

We also adjust and fill out the docstring.

A unit test will follow in a subsequent change. It can't be added here
without fixing a bug first.
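
A sketch of the shape of the new return value (names and structure are illustrative; the real method on the repository class differs):

    from collections import namedtuple

    # Callers receive plain data rather than pygit2.Branch objects, which
    # makes it trivial to hand-craft inputs in tests.
    HeadInfo = namedtuple('HeadInfo', ['version', 'commit_time', 'commit_hash'])

    def get_head_versions(branch_heads):
        """Map branch name -> HeadInfo(version, commit time, commit hash).

        branch_heads is assumed here to be an iterable of
        (name, version, commit_time, commit_hash) tuples.
        """
        return {
            name: HeadInfo(version, commit_time, commit_hash)
            for name, version, commit_time, commit_hash in branch_heads
        }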

b2f98c1... by Robie Basak

GitUbuntuSourceInformation: dependency injection

Add dependency injection to GitUbuntuSourceInformation. This allows the
creation of this object in tests such that we can mock a
launchpadlib.launchpad.Launchpad object.
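
The pattern is roughly the following (a sketch; the real constructor signature differs):

    class GitUbuntuSourceInformation:
        def __init__(self, pkgname, launchpad=None):
            if launchpad is None:
                # Production path: construct a real client.
                from launchpadlib.launchpad import Launchpad
                launchpad = Launchpad.login_anonymously('git-ubuntu', 'production')
            # Tests inject e.g. unittest.mock.MagicMock() here instead.
            self.launchpad = launchpad
            self.pkgname = pkgname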

a11bfa8... by Robie Basak

launchpad_versions_published_after: refactor call

We don't need to set args and then call with **args, given that is all
we do with it. Instead, just call self.archive.getPublishedSources()
with keyword arguments directly.
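
In sketch form (the keyword arguments shown are illustrative):

    # Before:
    #     args = dict(created_since_date=since, exact_match=True)
    #     publications = self.archive.getPublishedSources(**args)
    #
    # After:
    #     publications = self.archive.getPublishedSources(
    #         created_since_date=since,
    #         exact_match=True,
    #     )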

9addaa0... by Robie Basak

launchpad_versions_published_after: drop return

This return statement is redundant since it's at the end of the method
anyway.

535dca8... by Robie Basak

Use date_created to determine head versions

It's incorrect to use date_published to determine head versions that
will be used to match against Launchpad publications, since we use
date_created at commit creation time. We should be using date_created
consistently instead.

Not doing so means that we often (always?) fail to find a matching
Launchpad publication that is already imported and end up redundantly
reimporting everything from the beginning of time. This is terrible for
performance.

More details on date_created vs. date_published here:
https://irclogs.ubuntu.com/2020/04/16/%23launchpad.html#t13:45

This change is difficult to test right here. Further refactoring follows
in subsequent changes and a test is added later.

LP: #1979650
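
In other words, the "already imported?" check has to compare like with like, roughly as follows (a sketch only; the real logic differs):

    # The importer records publication.date_created at commit creation time,
    # so a later match must also use date_created; date_published can differ
    # and would then never match.
    def already_imported(publication, recorded_date):
        return publication.date_created == recorded_date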

0b8cc0b... by Robie Basak

Rewrite launchpad_versions_published_after

The logic in this method can be simplified significantly with a rewrite.
To mitigate any regression, a parametrized unit test is added with the
expected behaviour thought out from the importer spec.

There is still an inefficiency present here. In theory we could skip
importing pocket copies if the branch corresponding to a pocket is
up-to-date. However, the algorithm currently only matches the exact
date_created attribute of the Launchpad publication object from which a
particular version was first imported. To ensure that branches
are updated if any new pocket copies have occurred, we must "replay"
them all through the importer. Therefore there is potential here for a
future performance improvement.

284beb9... by Robie Basak

Add unit test for get_head_info()

Now that get_head_info() returns what we expect, we can add a unit test
for it.

2133870... by Robie Basak

Refactor _head_version_is_equal

Add a docstring, refactor the code to make it more readable, and rename
the method to match its definition better.

This should not result in any functional change.

06f0eca... by Robie Basak

More project renames

This is a followup to commits 57e5776, 002e452, 2f0e855 and 5aad111 with
further cleanups around the project rename from usd-importer to
git-ubuntu and the move to the <email address hidden>
mailing list. Thanks to Bryce for spotting some of the remaining pieces.

Where code is no longer used at all, or docs are completely out-of-date,
I've removed it instead of renaming the relevant bits. I've not worried
too much about fixing docs that I've touched if there's some value to
them staying, as that's a bit of a rabbit hole and I'd prefer to make
incremental progress.

0e3ca5f... by Robie Basak

submit: default to ~canonical-server-reporter

On the Canonical Server Team, we have been using ~canonical-server in
its own review slot for the sole purpose of gathering all reviews we're
interested in tracking together on this team's +activereviews page.

A problem with this is that we all belong to this team, so when a person
does a review, they sometimes accidentally "grab" that slot, so it
appears as their name rather than the team's, and thus disappears from
the report.

One way around this is to use a separate team that none of us actually
belong to. This way we can't "grab" that slot.

This changes the default to the new ~canonical-server-reporter team, to
help those who use the "git ubuntu submit" command.

4d497a7... by Robie Basak

Add comment on missing observability

This should help locate the older emailing code should it be needed in
the future.

c8216a7... by Robie Basak

Add test to accept refs that contain '+'

ab3351a... by Robie Basak

Improve documentation on validation constants

This should hopefully do a better job of signposting, so that anyone who
wants to change the constants understands the implications of doing so.

7c524e6... by Robie Basak

importer: flip sense of push arguments

Instead of passing around an inverse boolean "no_push" argument, pass
the more natural "push" instead.

This should make no functional change, but it prepares us to invert the
CLI argument: after that future change, passing around an inverse boolean
would make even less sense.

7f3a586... by Robie Basak

importer: flip CLI argument default to not push

git ubuntu import has two uses: 1) it's run by the importer service
workers, which should push by default; 2) it's run by users, for whom it
doesn't generally make sense to push by default.

Since it's easy for importer service workers to specify an option by
automation, we flip the default of the CLI to explicitly require --push
if you want to push. This makes it safer and easier to explain to users
how to use the import command locally.

The importer service worker then adds --push unconditionally.

This makes the "implied no push" behaviour of certain options redundant,
so those are removed.
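
In argparse terms, the flipped default amounts to something like this (illustrative; the real parser in gitubuntu/importer.py differs):

    import argparse

    parser = argparse.ArgumentParser(prog='git ubuntu import')
    parser.add_argument(
        '--push',
        action='store_true',  # defaults to False: users must opt in
        help='push the result to Launchpad (the service worker always passes this)',
    )
    assert parser.parse_args([]).push is False
    assert parser.parse_args(['--push']).push is True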

0901740... by Robie Basak

scriptutils: remove pool_map_import_srcpkg()

This function is no longer used from anywhere.

4a01d97... by Robie Basak

importer-service-worker: add --no-push argument

Asking the worker for --no-push enables a deeper dry run for performance
testing purposes. Unlike the behaviour of "git ubuntu import", "push
mode" is the default here because normally when one sets up the importer
service, it would be surprising behaviour not to do this, and this is
the only use case for this command.

ab5d7ad... by Robie Basak

Move import_srcpkg() to importer_service_worker.py

This function is only used from here, so there's no need for it to be in
a different module.

8681418... by Robie Basak

_main_with_repo: simplify if statement

Since the normal operation is to push, and straight after that we
"return 0", it's simpler to immediately "return 0" if we don't want to
push. This stops the usual code path being pushed "to the right", and
makes it easier to follow the logic.

This should not result in a functional change.
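
The shape of the change, as a sketch (the real _main_with_repo takes more arguments):

    def _main_with_repo(push, do_push):
        # Early return for the no-push case keeps the usual (push) path at
        # the top indentation level and easier to follow.
        if not push:
            return 0
        do_push()
        return 0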

f9a91d9... by Robie Basak

Add changelog date override for gmsh

36eef3f... by Robie Basak

prepare-upload: add test for ssh:// URL rewrites

According to LP 1942985, this is another case where a rewrite is
expected.

695a4d4... by Robie Basak

prepare-upload: handle ssh:// rewrites

According to LP 1942985, this is another case where a URL rewrite is
expected.

Unmerged commits

695a4d4... by Robie Basak

prepare-upload: handle ssh:// rewrites

According to LP 1942985, this is another case where a URL rewrite is
expected.

36eef3f... by Robie Basak

prepare-upload: add test for ssh:// URL rewrites

According to LP 1942985, this is another case where a rewrite is
expected.

cc48a7f... by Robie Basak

prepare-upload: output invalid option on failure

If "git ubuntu prepare-upload args" fails for whatever reason, we don't
want dpkg-buildpackage or similar to proceed if invoked using
"dpkg-buildpackage $(git ubuntu prepare-upload args)" as this will
silently hide the error. Instead, we can output an invalid option
"--git-ubuntu-prepare-upload-args-failed" which should cause
dpkg-buildpackage to fail, and hopefully lead the user to find the cause
in stderr from the failure in our command.

This change implements this new behaviour.

LP: #1942865
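
A sketch of the intended behaviour (the real code in gitubuntu/prepare_upload.py differs; build_args is a hypothetical helper):

    import sys

    def cli_printargs(build_args):
        try:
            print(' '.join(build_args()))
            return 0
        except Exception as e:
            print('git ubuntu prepare-upload args failed: %s' % e, file=sys.stderr)
            # dpkg-buildpackage does not recognise this option, so
            # "dpkg-buildpackage $(git ubuntu prepare-upload args)" fails
            # loudly instead of silently proceeding without the headers.
            print('--git-ubuntu-prepare-upload-args-failed')
            return 1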

d8750f1... by Robie Basak

prepare-upload: test for invalid option on failure

If "git ubuntu prepare-upload args" fails for whatever reason, we don't
want dpkg-buildpackage or similar to proceed if invoked using
"dpkg-buildpackage $(git ubuntu prepare-upload args)" as this will
silently hide the error. Instead, we can output an invalid option
"--git-ubuntu-prepare-upload-args-failed" which should cause
dpkg-buildpackage to fail, and hopefully lead the user to find the cause
in stderr from the failure in our command.

This change adds the test for this behaviour, prior to implementation.

3c5c8c7... by Robie Basak

prepare-upload: rewrite LP git+ssh:// URLs

Automatically supply the corresponding https:// LP URL for the rich
history changes file headers if a git+ssh:// LP URL is used.

LP: #1942985
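
The rewrite rule described by these prepare-upload commits looks roughly like this (a sketch; the real handling may differ):

    import re

    def rewrite_lp_url(url):
        """Map ssh:// and git+ssh:// Launchpad URLs to the https:// form."""
        return re.sub(
            r'^(?:git\+)?ssh://(?:[^@/]+@)?git\.launchpad\.net/',
            'https://git.launchpad.net/',
            url,
        )

    assert rewrite_lp_url(
        'git+ssh://racb@git.launchpad.net/~racb/ubuntu/+source/foo'
    ) == 'https://git.launchpad.net/~racb/ubuntu/+source/foo'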

76824a3... by Robie Basak

prepare-upload: refactor header data handling

Explicitly pull out the three header data items into their own named
variables to avoid confusion.

dfa0998... by Robie Basak

prepare-upload: add test for git+ssh:// rewrite

Identified in LP: #1942985: if a user has a git+ssh:// LP URL, we should
automatically rewrite it to the https:// one.

This is the test for this, which is expected to fail because it isn't
fixed yet.

8fb8e5a... by Robie Basak

Fix typos in test docstrings

ab5d7ad... by Robie Basak

Move import_srcpkg() to importer_service_worker.py

This function is only used from here, so there's no need for it to be in
a different module.

4a01d97... by Robie Basak

importer-service-worker: add --no-push argument

Asking the worker for --no-push enables a deeper dry run for performance
testing purposes. Unlike "git ubuntu import", pushing is the default here:
when setting up the importer service it would be surprising not to push,
and that is the only use case for this command.

Preview Diff

diff --git a/doc/README.md b/doc/README.md
new file mode 100644
index 0000000..ff10689
--- /dev/null
+++ b/doc/README.md
@@ -0,0 +1,117 @@
1<<<<<<< doc/README.md
2=======
3## Running the git-ubuntu importer ##
4This just covers how to run [`git ubuntu import`](https://code.launchpad.net/git-ubuntu).
5
6## Getting via snap ##
7The preferred installation method is to install via snap:
8
91. install the snap
10
11 $ snap install --classic git-ubuntu
12
13## [Alternate:] Getting via git ##
14Less well tested, but in theory this should work as well.
15
161. Get `git-ubuntu` from git
17
18 $ git clone git://git.launchpad.net/git-ubuntu git-ubuntu
19or
20
21 $ git clone https://git.launchpad.net/git-ubuntu git-ubuntu
22or
23
24 $ git clone ssh://git.launchpad.net/git-ubuntu git-ubuntu
25
26
272. Put it in your PATH
28
29 $ PATH="$PWD/git-ubuntu/bin:$PATH"
30
313. Get necessary dependencies
32
33 $ sudo apt update -qy
34
35 $ deps="dpkg-dev git-buildpackage python3-argcomplete \
36 python3-lazr.restfulclient python3-debian python3-distro-info \
37 python3-launchpadlib python3-pygit2 python3-ubuntutools \
38 python3-cachetools python3-pkg-resources python3-pytest \
39 python3-petname quilt"
40
41 $ sudo apt install -qy ${deps}
42
43
44## Running ##
45
46 * For local usage
47
48 `git ubuntu import` will push to launchpad git by default. If you just want to get a git repo locally of a given package, then:
49
50 $ mkdir ${HOME}/Imports
51 $ PKG=uvtool
52 $ git ubuntu import -v --no-push --directory=${HOME}/Imports/$PKG $PKG
53
54 * As member of [git-ubuntu-import](https://launchpad.net/~git-ubuntu-import) for official publishing.
55
56 $ PKG=uvtool
57 $ LP_USER=smoser # your launchpad user name if different from $USER
58 $ git ubuntu import -v --directory=$PKG --lp-user=$LP_USER $PKG
59
60## Shell completion (bash) ##
61
62 * `git-ubuntu` will autocomplete by default if global argcomplete has
63 been enabled
64 (https://github.com/kislyuk/argcomplete#activating-global-completion)
65 or specify
66
67 eval "$(register-python-argcomplete git-ubuntu)"
68
69 The snap version does this by default.
70
71 * `git ubuntu` autocompletion is a little more challenging. To enable
72 it, add the following to your .bashrc or similar:
73
74 if [ -f /path/to/git-ubuntu/doc/gitubuntu-completion.sh ]; then
75 . /path/to/git-ubuntu/doc/gitubuntu-completion.sh
76 fi
77
78 For the snap version, this would look like:
79
80 if [ -f /snap/git-ubuntu/current/doc/gitubuntu-completion.sh ]; then
81 . /snap/git-ubuntu/current/doc/gitubuntu-completion.sh
82 fi
83
84## View Output ##
85If you did a local checkout with `--directory=./$PKG` then you'll have a git repository in `./$PKG/git`.
86
87 $ cd $PKG
88 $ git branch
89 ubuntu/saucy
90 ubuntu/saucy-proposed
91 ubuntu/trusty
92 ubuntu/trusty-proposed
93 ubuntu/utopic
94 ubuntu/utopic-proposed
95 ubuntu/vivid
96 ubuntu/vivid-proposed
97 ubuntu/wily
98 ubuntu/xenial
99 ubuntu/yakkety
100
101If you did `--lp-owner=git-ubuntu-import`, then your repo should be
102listed in web view at [https://code.launchpad.net/~git-ubuntu-import/+git]. And it should be able to be cloned with:
103
104 $ git clone https://git.launchpad.net/~git-ubuntu-import/ubuntu/+source/$PKG
105or
106
107 $ git clone lp:~git-ubuntu-import/ubuntu/+source/$PKG
108or
109
110 $ git ubuntu clone $PKG
111
112
113## Links ##
114 * [GitWorkflow Wiki page](https://wiki.ubuntu.com/UbuntuDevelopment/Merging/GitWorkflow)
115 * [Launchpad git for git-ubuntu](https://code.launchpad.net/git-ubuntu)
116 * [Git view of git-ubuntu](https://git.launchpad.net/git-ubuntu)
117>>>>>>> doc/README.md
diff --git a/doc/SPECIFICATION b/doc/SPECIFICATION
new file mode 100644
index 0000000..21cd184
--- /dev/null
+++ b/doc/SPECIFICATION
@@ -0,0 +1,167 @@
1<<<<<<< doc/SPECIFICATION
2=======
3Specification
4
5git URL shortcuts used (add these to ~/.gitconfig or expand them
6manually yourself):
7
8[url "ssh://<LPID>@git.launchpad.net/~<LPID>/ubuntu/+source/"]
9 insteadof = lpmep:
10
11Definitions: "old debian", "old ubuntu", "new debian", "new ubuntu" are
12as understood. Make sure that "old debian" is really the last common
13ancestor of "old ubuntu" and "new debian". Determining this is
14especially prone to error if Ubuntu imported new upstream versions since
15it diverged from Debian. If this is wrong, then pain will result.
16
17By "merge" we always mean an "Ubuntu merge", which is in git terms
18really a rebase. No actual git merge takes place in this entire
19workflow.
20
21No trees in this workflow ever have quilt patches applied. All commits
22are with quilt fully popped and no .pc directory. Changes to quilt
23patches are seen in debian/patches/* only.
24
25Common git references expected (T for tag, B for branch):
26
27Things that will be imported by a sponsor or the importer (available
28from: lpusip:<package>; ask a sponsor if missing):
29
30* T import/<version> and T upload/<version>
31 * Logically this is the tree corresponding to a particular tag;
32 history is secondary.
33 * The tree is identical to corresponding source package version in the
34 archive.
35 * For T import/<version>: imported from the archive and pushed to
36 ~git-ubuntu-import as an authoritative source.
37 * For T upload/<version>: pushed to ~ubuntu-server-dev by an uploader
38 to record exactly what was uploaded.
39 * Pushing to ~ubuntu-server-dev is restricted to uploaders.
40 * The parent commit should be the previous version import or upload
41 tag where available. An orphan commit is acceptable in the
42 exceptional case that this is not possible.
43
44* B ubuntu/devel
45 * Logically this is our moving reference for what is currently in the
46 Ubuntu development release.
47 * In ~ubuntu-server-dev, this must always point to something also
48 tagged as import/<version> or upload/<version>.
49 * Pushing to ~ubuntu-server-dev is restricted to uploaders.
50 * This branch will be rebased to new Debian imports during Ubuntu
51 "merges" (but tags will be left behind).
52
53Things that should be made available to a sponsor when submitting a
54merge for upload (push to: lpmep:<package>):
55
56* T logical/<old ubuntu>
57 * Logically, this is a patchset
58 ({import,upload}/<old debian>..logical/<old ubuntu>).
59 * Breakdown of previous Ubuntu delta.
60 * Must be based on an official import/<old debian> or upload/<old debian>
61 tag ("official" means from ~ubuntu-server-dev).
62 * One commit per logical change over the entire Ubuntu delta.
63 * Churn squashed.
64 * No upstream changes (so only changes in debian/*).
65 * No changes to debian/changelog.
66 * No "update-maintainer" or "Vcs-*" or other meta changes.
67 * To get to this, you will probably start from reconstruct/<old ubuntu>,
68 described below.
69 * Coherence checks:
70 - Identical to the corresponding import/<version> except for:
71 + Meta changes (update-maintainer, Vcs-*) in debian/control.
72 + Anything not in debian/*, which should be unchanged
73 (exceptionally this happens when new upstream versions were
74 imported ahead of Debian).
75 + debian/changelog, which should be unchanged.
76 - No line should be touched twice, except where separate logical
77 changes need to touch the same line.
78 * Providing this makes it easy for the sponsor to check a proposed
79 merge:
80 1. Check correctness of this tag against the previous Ubuntu delta
81 (perform the above checks and use "git log -p" to make
82 sure each logical commit describes only its own changes).
83 2. Ensure that every commit here is accounted for in the proposed
84 merge.
85
86* B merge
87 * Proposed merge for upload.
88 * Based on import/<new debian> or upload/<new debian>.
89 * One commit per logical change; no changes to debian/changelog in
90 those commits.
91 * One commit for each of merge-changelogs, reconstruct-changelog, any
92 changelog tweaks and ubuntu-meta (or update-maintainer as you wish).
93 * debian/changelog should be "released" with the version string
94 matching the proposed upload version and targeting the correct
95 pocket.
96 * Add commits to the end of this branch in response to reviewer
97 comments.
98 * If agreed with your sponsor that for the changes requested a new
99 rebased merge branch will be easier to manage than adding commits to
100 the end, then do this instead. Rebase the original "merge" branch.
101 To keep history, if you wish tag the old one "merge.v1". You may
102 also rebase like this as you wish during preparation before
103 presenting this branch for review.
104
105Things you may want to make available to reviewers so that they can
106check your process (push to: lpmep:<package>), for which we have
107standardised names:
108
109* T reconstruct/<old ubuntu>
110 * Logically, this is a patchset
111 ({import,upload}/<old debian>..reconstruct/<old ubuntu>).
112 * Based on import/<old debian>. For each Ubuntu upload since then:
113 * One commit to pull in a new upstream if there is one (rare). This
114 must not contain any changes to debian/.
115 * One commit per logical change.
116 * One commit for changelog.
117 * One commit for any ubuntu-meta/update-maintainer change (usually
118 only in merge uploads).
119 * Drop non-logical commits from this tip and rebase to squash and
120 split to derive the logical/<old ubuntu> tag.
121
122* T merge.v1, merge.v2, etc.
123 * The old state of each merge branch before you rebased it. Only
124 useful if you rebased during your merge. If done after your initial
125 review request, please only do this with agreement of your sponsor,
126 since it causes your sponsor more review time.
127
128Merge proposal to make in Launchpad:
129
130lpmep:<package> merge → lpusdp:<package> ubuntu/devel
131
132After review:
133
134If adding commits in response to reviewer comments, just push again to
135lpmep:<package> merge.
136
137If (exceptionally) rebasing in response to reviewer comments:
138 1. Tag the old branch "merge.v1" (or v2, v3 etc. for future iterations)
139 2. Rebase the "merge" branch as required
140 3. Push to lpmep:<package>:
141 a) The new "v" tag from above.
142 b) The merge branch (force will be required).
143
144For "traditional" sponsors:
145
146git can easily generate the traditional debdiffs that you normally
147review. Assuming you have appropriate remote tracking branches:
148
149 * For Ubuntu → Ubuntu, "git diff lpusdp/ubuntu/devel sponsoree/merge"
150 * For Debian → Ubuntu, "git diff lpusdp/debian/sid sponsoree/merge"
151
152Or you can ask the sponsoree to generate these for you.
153
154To upload a reviewed merge (for the sponsor):
155
156(Sponsors: you can just ignore these instructions and upload the
157traditional way if you like. But sponsorees cannot push to our VCS and
158you can, so it would be nice if you could push this please, so a future
159merger doesn't have to reconstruct the lost information).
160
1611. Upload using dput as usual.
1622. Tag the merge branch "upload/<version>" (replace ':' and '~' with '_'
163 to meet git's naming requirements). A lightweight tag is fine, or
164 go ahead and annotate if you want to include any extra notes.
1653. Force push the merge branch to lpusdp:<package> ubuntu/devel.
1664. Push the "upload/<version>" tag to lpusdp:<package>.
167>>>>>>> doc/SPECIFICATION
diff --git a/doc/release-process.md b/doc/release-process.md
new file mode 100644
index 0000000..524736e
--- /dev/null
+++ b/doc/release-process.md
@@ -0,0 +1,264 @@
1<<<<<<< doc/release-process.md
2=======
3Release Process
4===============
5
61. Set the new version number
7------------------------------
8
9See gitubuntu/version.py for the current version number.
10
11 $ export LAST_RELEASE=$(cat gitubuntu/version.py | cut -d\' -f2)
12 $ echo "${LAST_RELEASE}"
13
14Git Ubuntu's version numbers follow the common MAJOR.MINOR.PATCH and
15MAJOR.MINOR.PATCH-rcN patterns, where for this project these are
16interpreted as follows:
17
18 - MAJOR is updated for API breaking changes such as alterations in
19 importer hash ABI stability. As a special rule, MAJOR=0 indicates
20 no stability guarantees. Notably, changes in MAJOR version are not
21 guaranteed to be forward or backward compatible with earlier MAJOR
22 versions.
23
24 - MINOR is incremented for feature-level changes that may alter how
25 the git ubuntu frontends behave, including breaking changes in how
26 git ubuntu subcommands and their parameters work. The importer API,
27 however, is intended to be backward compatible from one MINOR
28 version to the next, with no breaking changes.
29
30 - PATCH is incremented for bug fixes and routine feature additions
31 that introduce no compatibility issues for either the backend
32 importer or the frontend client. In particular, new commands and
33 parameters may be introduced, but existing ones will not be changed
34 or removed.
35
36 - rcN indicates a release candidate, using a sequential numbering for
37 'N'.
38
39Define the new version for the release:
40
41 $ export VERSION="<MAJOR>.<MINOR>.<PATCH>"
42
43Or, for a release candidate:
44
45 $ export VERSION="<MAJOR>.<MINOR>.<PATCH>-rcN"
46
47Set it in the git repo:
48
49 $ git checkout -b ${VERSION}-release
50 $ echo "VERSION = '${VERSION}'" > gitubuntu/version.py
51 $ git commit gitubuntu/version.py -m "version: bump to ${VERSION}"
52 $ git tag --annotate -m "${VERSION} Release" ${VERSION}
53
54The annotated tag is necessary, because the snap build mechanisms
55determine the version to set in the snap based on it.
56
57
582. Draft release announcement
59------------------------------
60
61The release announcement generally summarizes the major changes in the
62release, and (where possible) identifies the bug fixes included in it.
63Some examples of past release announcements:
64
65 - 0.2.1: https://lists.ubuntu.com/archives/ubuntu-server/2017-September/007594.html
66 - 0.3.0: https://lists.ubuntu.com/archives/ubuntu-server/2017-October/007598.html
67 - 0.4.0: https://lists.ubuntu.com/archives/ubuntu-server/2017-October/007605.html
68 - 0.7.1: https://lists.ubuntu.com/archives/ubuntu-server/2018-March/007667.html
69
70The git log can be referred to for changes worth mentioning:
71
72 $ git log --stat ${LAST_RELEASE}..
73
74If desired, a shortlog can be appended to the release announcement, to
75itemize all changes:
76
77 $ git shortlog ${LAST_RELEASE}...
78
79
803. Testing
81-----------
82
83First check there are no unexpected test failures in trunk:
84
85 $ python3 ./setup.py check
86 $ python3 ./setup.py build
87 $ pytest-3 .
88
89Optionally, the full test suite can be directly executed, although since
90it has some rather exacting dependencies, it may not be able to build
91properly.
92
93 $ python3 ./setup.py test
94
95Next, push a copy of the branch up to launchpad under your own namespace
96for Continuous Integration (CI) testing:
97
98 $ git push ${LP_USERNAME} ${VERSION}-release
99
100Go to the Launchpad page for the branch and create a merge proposal
101targeted to lp:git-ubuntu, set 'main' as the Target branch and set
102the Description to say "For CI build only". Review type can be set to
103'ci'. This will ensure the regular CI runs on it, which exercises the
104snap build mechanics, but let's the development team know it can be
105ignored for review purposes. This isn't the snap we'll actually be
106using, but will produce one we can download and inspect.
107
108A snap candidate (not yet uploaded to the store) can be installed
109locally for testing like this:
110
111 $ lxc exec ${CONTAINER} -- rm /tmp/git-ubuntu_0+git.*_amd64.snap
112 $ lxc file push ./git-ubuntu_0+git.*_amd64.snap ${CONTAINER}/tmp
113 $ lxc exec ${CONTAINER} -- bash
114 $ sudo snap install --classic --dangerous /tmp/git-ubuntu_0+git.*_amd64.snap
115
116The snap package itself can be locally mounted directly as a filesystem,
117which can be helpful for evaluating its contents. For example, to look
118at what Python modules are included:
119
120 $ mkdir /tmp/snap
121 $ sudo mount git-ubuntu_0+git.59a1e51_amd64.snap /tmp/snap/
122 $ ls /tmp/snap/usr/lib/python3.6/
123 $ cd ${HOME} && umount /tmp/snap && rmdir /tmp/snap
124
125
1264. Release the new version
127---------------------------
128
129Once everything looks good, merge the change from your local release branch to master:
130
131 $ git checkout master
132 $ git merge --ff-only ${VERSION}-release
133
134Make sure everything looks ok. The status should show no uncommitted
135changes, etc. Verify the log shows the correct tags and that HEAD
136points to master, etc. Double-check that git describe displays
137${VERSION}:
138
139 $ git status
140 $ git log --oneline --decorate=short
141 $ git describe
142
143If all looks good, now push the annotated tag and code changes to origin:
144
145 $ git push origin master ${VERSION}
146
147
1485. Publish Snap
149---------------
150
151Channels used for delivering the snap package are defined as follows:
152
153 - EDGE: Tracks the latest code in master to allow testing of
154 potentially unstable work. This is not recommended for general
155 usage by end users.
156
157 - BETA: Most of the time, this channel will track the same version as
158 in STABLE, but also delivers release candidates and sometimes may
159 provide early access to new features or bug fixes. This channel is
160 recommended particularly for advanced git-ubuntu users who wish to
161 participate in testing activities. It is also the channel used for
162 the importer on the server.
163
164 - STABLE: This channel tracks the current release used in the
165 git-ubuntu service itself. This is the recommended channel for
166 all end users.
167
168You will initially publish the package to EDGE only to verify it builds
169properly.
170
171First, trigger a rebuild of the snap in the server team's Jenkins
172instance. The git push from step #4 will get picked up by the nightly
173builder, but if you don't wish to wait a day for the build, you can
174manually trigger it on this page:
175
176 https://jenkins.ubuntu.com/server/job/git-ubuntu-ci-nightly/
177
178Make sure you're logged into Jenkins, then click
179
180 "Build Now"
181
182Once this is done, download the snap from Jenkins. It should be listed
183under Last Successful Artifacts on this page:
184
185 https://jenkins.ubuntu.com/server/job/git-ubuntu-ci-nightly/
186
187Next, verify you have your snapcraft account configured, logged in, and
188working locally:
189
190 $ snapcraft whoami
191 $ snap list
192
193Finally, upload the snap to EDGE:
194
195 $ snapcraft push --release edge ./git-ubuntu_${VERSION}+git<whatever>_amd64.snap
196
197The command will block for a few minutes while the store analyzes the
198snap. Once it is approved, it will become available in the edge channel.
199
200For anything but trivial releases, you should then `snap install` the
201edge version of the package in a test environment to verify it.
202
203Once you deem it good to go, use the Snapcraft website
204(https://snapcraft.io/git-ubuntu/releases) to copy the snap to BETA, and
205proceed with installing it in production (next step). Solicit broader
206testing, as appropriate, and then after a sufficient amount of testing
207time (e.g. a week or so) copy the snap to STABLE.
208
209
2106. Installation to Production
211-----------------------------
212
213See our internal process documentation for details on how to do this.
214
215
2167. Announce Release
217-------------------
218
219Email the (gpg signed) announcement to:
220
221 To: ubuntu-devel@lists.ubuntu.com
222 Cc: ubuntu-distributed-devel@lists.ubuntu.com
223
224Upload a copy of the announcement to https://launchpad.net/git-ubuntu/
225
226
2278. Close bugs
228-------------
229
230Close all bugs fixed by this release. Here's an example that can be run
231from `lp-shell` to close all bug tasks marked "Fix Committed". If you
232use this, remember to change `VERSION` appropriately:
233
234 VERSION = '1.0'
235 tasks = list(lp.projects['git-ubuntu'].searchTasks(status='Fix Committed'))
236 bugs = [lp.load(bug_link) for bug_link in set(task.bug_link for task in tasks)]
237 for bug in bugs:
238 bug.newMessage(
239 subject=f'Fix released in git-ubuntu',
240 content=f'Fix released in git-ubuntu version {VERSION}',
241 )
242 for task in tasks:
243 task.status = 'Fix Released'
244 task.lp_save()
245
246
2479. Update Trello Card
248---------------------
249
250If a card hasn't been created in the daily-ubuntu-server board for the
251release task already, add one at this point. Add yourself as a member
252of the card, and add labels 'git-ubuntu' and 'highlight'. The latter
253label flags it to be mentioned in the week's Ubuntu Server Developer Summary.
254
255
25610. Discourse Blogging (Optional)
257--------------------------------
258
259If desired, follow up with one or more topics/posts to
260discourse.ubuntu.com about the major new features included in the
261release. Discourse posts shouldn't be done just for ordinary bug
262fixing, and shouldn't simply mirror the release announcement or usage
263documentation.
264>>>>>>> doc/release-process.md
diff --git a/gitubuntu/changelog_date_overrides.txt b/gitubuntu/changelog_date_overrides.txt
new file mode 100644
index 0000000..cdf5e30
--- /dev/null
+++ b/gitubuntu/changelog_date_overrides.txt
@@ -0,0 +1,19 @@
1<<<<<<< gitubuntu/changelog_date_overrides.txt
2=======
3# Package versions that have illegal dates in their changelog entries.
4# In these cases the first seen publication date must be used instead
5# for the author date of a synthesized commit.
6#
7# Note: this file must exactly match the import specification. Before
8# adding an entry here, adjust the specification first.
9
10ghostscript 9.50~dfsg-5ubuntu4
11gmsh 2.0.7-1.2ubuntu1
12iscsitarget 0.4.15+svn148-2.1ubuntu1
13lxqt-config 0.13.0-0ubuntu4
14mail-spf-perl 2.004-0ubuntu1
15nut 2.2.0-2
16prips 0.9.4-3
17prometheus-alertmanager 0.15.3+ds-3ubuntu1
18software-properties 0.80
19>>>>>>> gitubuntu/changelog_date_overrides.txt
diff --git a/gitubuntu/changelog_tests/maintainer_name_inner_space b/gitubuntu/changelog_tests/maintainer_name_inner_space
new file mode 100644
index 0000000..17cc421
--- /dev/null
+++ b/gitubuntu/changelog_tests/maintainer_name_inner_space
@@ -0,0 +1,8 @@
1<<<<<<< gitubuntu/changelog_tests/maintainer_name_inner_space
2=======
3testpkg (1.0) xenial; urgency=medium
4
5 * Sample entry.
6
7 -- Test Maintainer <test-maintainer@example.com> Thu, 01 Jan 1970 00:00:00 +0000
8>>>>>>> gitubuntu/changelog_tests/maintainer_name_inner_space
diff --git a/gitubuntu/changelog_tests/maintainer_name_leading_space b/gitubuntu/changelog_tests/maintainer_name_leading_space
new file mode 100644
index 0000000..07e9dd8
--- /dev/null
+++ b/gitubuntu/changelog_tests/maintainer_name_leading_space
@@ -0,0 +1,8 @@
1<<<<<<< gitubuntu/changelog_tests/maintainer_name_leading_space
2=======
3testpkg (1.0) xenial; urgency=medium
4
5 * Sample entry.
6
7 -- Test Maintainer <test-maintainer@example.com> Thu, 01 Jan 1970 00:00:00 +0000
8>>>>>>> gitubuntu/changelog_tests/maintainer_name_leading_space
diff --git a/gitubuntu/changelog_tests/maintainer_name_trailing_space b/gitubuntu/changelog_tests/maintainer_name_trailing_space
new file mode 100644
index 0000000..358b4a3
--- /dev/null
+++ b/gitubuntu/changelog_tests/maintainer_name_trailing_space
@@ -0,0 +1,8 @@
1<<<<<<< gitubuntu/changelog_tests/maintainer_name_trailing_space
2=======
3testpkg (1.0) xenial; urgency=medium
4
5 * Sample entry.
6
7 -- Test Maintainer <test-maintainer@example.com> Thu, 01 Jan 1970 00:00:00 +0000
8>>>>>>> gitubuntu/changelog_tests/maintainer_name_trailing_space
diff --git a/gitubuntu/changelog_tests/test_date_1 b/gitubuntu/changelog_tests/test_date_1
new file mode 100644
index 0000000..3c54cec
--- /dev/null
+++ b/gitubuntu/changelog_tests/test_date_1
@@ -0,0 +1,8 @@
1<<<<<<< gitubuntu/changelog_tests/test_date_1
2=======
3testpkg (1.0) xenial; urgency=medium
4
5 * Sample entry.
6
7 -- Test Maintainer <test-maintainer@donotmail.com> Mon, 12 May 2016 08:14:34 -0700
8>>>>>>> gitubuntu/changelog_tests/test_date_1
diff --git a/gitubuntu/changelog_tests/test_date_2 b/gitubuntu/changelog_tests/test_date_2
new file mode 100644
index 0000000..9defc1f
--- /dev/null
+++ b/gitubuntu/changelog_tests/test_date_2
@@ -0,0 +1,8 @@
1<<<<<<< gitubuntu/changelog_tests/test_date_2
2=======
3testpkg (1.0) xenial; urgency=medium
4
5 * Sample entry.
6
7 -- Test Maintainer <test-maintainer@donotmail.com> Mon, 12 May 2016 08:14:34 -0700
8>>>>>>> gitubuntu/changelog_tests/test_date_2
diff --git a/gitubuntu/changelog_tests/test_distribution b/gitubuntu/changelog_tests/test_distribution
new file mode 100644
index 0000000..099b2b2
--- /dev/null
+++ b/gitubuntu/changelog_tests/test_distribution
@@ -0,0 +1,8 @@
1<<<<<<< gitubuntu/changelog_tests/test_distribution
2=======
3testpkg (1.0) xenial; urgency=medium
4
5 * Sample entry.
6
7 -- Test Maintainer <test-maintainer@donotmail.com> Mon, 12 May 2016 08:14:34 -0700
8>>>>>>> gitubuntu/changelog_tests/test_distribution
diff --git a/gitubuntu/changelog_tests/test_distribution_source_1 b/gitubuntu/changelog_tests/test_distribution_source_1
new file mode 100644
index 0000000..dbb4587
--- /dev/null
+++ b/gitubuntu/changelog_tests/test_distribution_source_1
@@ -0,0 +1,8 @@
1<<<<<<< gitubuntu/changelog_tests/test_distribution_source_1
2=======
3testpkg (1.0) xenial; urgency=medium
4
5 * Sample entry.
6
7 -- Test Maintainer <test-maintainer@donotmail.com> Mon, 12 May 2016 08:14:34 -0700
8>>>>>>> gitubuntu/changelog_tests/test_distribution_source_1
diff --git a/gitubuntu/changelog_tests/test_distribution_source_2 b/gitubuntu/changelog_tests/test_distribution_source_2
new file mode 100644
index 0000000..04abafe
--- /dev/null
+++ b/gitubuntu/changelog_tests/test_distribution_source_2
@@ -0,0 +1,8 @@
1<<<<<<< gitubuntu/changelog_tests/test_distribution_source_2
2=======
3testpkg (1.0) zesty-security; urgency=medium
4
5 * Sample entry.
6
7 -- Test Maintainer <test-maintainer@donotmail.com> Mon, 12 May 2016 08:14:34 -0700
8>>>>>>> gitubuntu/changelog_tests/test_distribution_source_2
diff --git a/gitubuntu/changelog_tests/test_distribution_source_3 b/gitubuntu/changelog_tests/test_distribution_source_3
new file mode 100644
index 0000000..f89aa4a
--- /dev/null
+++ b/gitubuntu/changelog_tests/test_distribution_source_3
@@ -0,0 +1,8 @@
1<<<<<<< gitubuntu/changelog_tests/test_distribution_source_3
2=======
3testpkg (1.0) unstable; urgency=medium
4
5 * Sample entry.
6
7 -- Test Maintainer <test-maintainer@donotmail.com> Mon, 12 May 2016 08:14:34 -0700
8>>>>>>> gitubuntu/changelog_tests/test_distribution_source_3
diff --git a/gitubuntu/changelog_tests/test_distribution_source_4 b/gitubuntu/changelog_tests/test_distribution_source_4
new file mode 100644
index 0000000..8c8fb68
--- /dev/null
+++ b/gitubuntu/changelog_tests/test_distribution_source_4
@@ -0,0 +1,8 @@
1<<<<<<< gitubuntu/changelog_tests/test_distribution_source_4
2=======
3testpkg (1.0) devel; urgency=medium
4
5 * Sample entry.
6
7 -- Test Maintainer <test-maintainer@donotmail.com> Mon, 12 May 2016 08:14:34 -0700
8>>>>>>> gitubuntu/changelog_tests/test_distribution_source_4
diff --git a/gitubuntu/changelog_tests/test_maintainer_1 b/gitubuntu/changelog_tests/test_maintainer_1
new file mode 100644
index 0000000..62a89ff
--- /dev/null
+++ b/gitubuntu/changelog_tests/test_maintainer_1
@@ -0,0 +1,8 @@
1<<<<<<< gitubuntu/changelog_tests/test_maintainer_1
2=======
3testpkg (1.0) xenial; urgency=medium
4
5 * Sample entry.
6
7 -- Test Maintainer <test-maintainer@donotmail.com> Thu, 01 Jan 1970 00:00:00 +0000
8>>>>>>> gitubuntu/changelog_tests/test_maintainer_1
diff --git a/gitubuntu/changelog_tests/test_maintainer_2 b/gitubuntu/changelog_tests/test_maintainer_2
new file mode 100644
index 0000000..7b8db29
--- /dev/null
+++ b/gitubuntu/changelog_tests/test_maintainer_2
@@ -0,0 +1,8 @@
1<<<<<<< gitubuntu/changelog_tests/test_maintainer_2
2=======
3testpkg (1.0) xenial; urgency=medium
4
5 * Sample entry.
6
7 -- <test-maintainer@donotmail.com> Thu, 01 Jan 1970 00:00:00 +0000
8>>>>>>> gitubuntu/changelog_tests/test_maintainer_2
diff --git a/gitubuntu/changelog_tests/test_maintainer_3 b/gitubuntu/changelog_tests/test_maintainer_3
new file mode 100644
index 0000000..195564a
--- /dev/null
+++ b/gitubuntu/changelog_tests/test_maintainer_3
@@ -0,0 +1,8 @@
1<<<<<<< gitubuntu/changelog_tests/test_maintainer_3
2=======
3testpkg (1.0) xenial; urgency=medium
4
5 * Sample entry.
6
7 -- <test-maintainer@donotmail.com> Thu, 01 Jan 1970 00:00:00 +0000
8>>>>>>> gitubuntu/changelog_tests/test_maintainer_3
diff --git a/gitubuntu/changelog_tests/test_versions_1 b/gitubuntu/changelog_tests/test_versions_1
new file mode 100644
index 0000000..ec94bc5
--- /dev/null
+++ b/gitubuntu/changelog_tests/test_versions_1
@@ -0,0 +1,8 @@
1<<<<<<< gitubuntu/changelog_tests/test_versions_1
2=======
3testpkg (1.0) xenial; urgency=medium
4
5 * Sample entry.
6
7 -- Test Maintainer <test-maintainer@donotmail.com> Mon, 12 May 2016 08:14:34 -0700
8>>>>>>> gitubuntu/changelog_tests/test_versions_1
diff --git a/gitubuntu/changelog_tests/test_versions_2 b/gitubuntu/changelog_tests/test_versions_2
new file mode 100644
index 0000000..ed6ade9
--- /dev/null
+++ b/gitubuntu/changelog_tests/test_versions_2
@@ -0,0 +1,14 @@
1<<<<<<< gitubuntu/changelog_tests/test_versions_2
2=======
3testpkg (2.0) xenial; urgency=medium
4
5 * Sample entry 2.
6
7 -- Test Maintainer <test-maintainer@donotmail.com> Mon, 27 Aug 2016 12:10:34 -0700
8
9testpkg (1.0) xenial; urgency=medium
10
11 * Sample entry 1.
12
13 -- Test Maintainer <test-maintainer@donotmail.com> Mon, 12 May 2016 08:14:34 -0700
14>>>>>>> gitubuntu/changelog_tests/test_versions_2
diff --git a/gitubuntu/changelog_tests/test_versions_3 b/gitubuntu/changelog_tests/test_versions_3
new file mode 100644
index 0000000..8fbf944
--- /dev/null
+++ b/gitubuntu/changelog_tests/test_versions_3
@@ -0,0 +1,26 @@
1<<<<<<< gitubuntu/changelog_tests/test_versions_3
2=======
3testpkg (4.0) zesty; urgency=medium
4
5 * Sample entry 4.
6
7 -- Test Maintainer <test-maintainer@donotmail.com> Mon, 03 Apr 2017 18:04:01 -0700
8
9testpkg (3.0) yakkety; urgency=medium
10
11 * Sample entry 3.
12
13 -- Test Maintainer <test-maintainer@donotmail.com> Fri, 10 Nov 2016 03:34:10 -0700
14
15testpkg (2.0) xenial; urgency=medium
16
17 * Sample entry 2.
18
19 -- Test Maintainer <test-maintainer@donotmail.com> Sat, 27 Aug 2016 12:10:55 -0700
20
21testpkg (1.0) xenial; urgency=medium
22
23 * Sample entry 1.
24
25 -- Test Maintainer <test-maintainer@donotmail.com> Thu, 12 May 2016 08:14:34 -0700
26>>>>>>> gitubuntu/changelog_tests/test_versions_3
diff --git a/gitubuntu/clone.py b/gitubuntu/clone.py
new file mode 100644
index 0000000..aff52be
--- /dev/null
+++ b/gitubuntu/clone.py
@@ -0,0 +1,178 @@
1<<<<<<< gitubuntu/clone.py
2=======
3import argparse
4import logging
5import os
6import re
7import shutil
8from subprocess import CalledProcessError
9import sys
10from gitubuntu.__main__ import top_level_defaults
11from gitubuntu.git_repository import (
12 GitUbuntuRepository,
13 GitUbuntuRepositoryFetchError,
14)
15from gitubuntu.run import decode_binary, run
16
17import pkg_resources
18import pygit2
19
20def copy_hooks(src, dst):
21 try:
22 os.mkdir(dst)
23 except FileExistsError:
24 pass
25
26 for hook in os.listdir(src):
27 shutil.copy2(
28 os.path.join(src, hook),
29 dst,
30 )
31
32def main(
33 package,
34 directory=None,
35 lp_user=None,
36 proto=top_level_defaults.proto,
37):
38 """Entry point to clone subcommand
39
40 @package: Name of source package
41 @directory: directory to clone the repository into
42 @lp_user: user to authenticate to Launchpad as
43 @proto: string protocol to use (one of 'http', 'https', 'git')
44
45 If directory is None, a relative directory with the same name as
46 package will be used.
47
48 If lp_user is None, value of `git config gitubuntu.lpuser` will be
49 used.
50
51 Returns the resulting GitUbuntuRepository object, if successful;
52 None otherwise.
53 """
54 directory = (
55 os.path.abspath(directory)
56 if directory
57 else os.path.join(os.path.abspath(os.getcwd()), package)
58 )
59 if os.path.isdir(directory):
60 logging.error('directory %s exists' % directory)
61 return None
62
63 local_repo = GitUbuntuRepository(
64 local_dir=directory,
65 lp_user=lp_user,
66 fetch_proto=proto,
67 )
68
69 copy_hooks(
70 pkg_resources.resource_filename(
71 'gitubuntu',
72 'hooks',
73 ),
74 os.path.join(
75 directory,
76 os.getenv('GIT_DIR', '.git'),
77 'hooks',
78 ),
79 )
80
81 local_repo.add_base_remotes(package)
82 try:
83 local_repo.fetch_base_remotes(verbose=True)
84 except GitUbuntuRepositoryFetchError:
85 logging.error("Unable to find an imported repository for %s. "
86 "Please request an import by e-mailing "
87 "ubuntu-distributed-devel@lists.ubuntu.com.",
88 package
89 )
90 shutil.rmtree(local_repo.local_dir)
91 return None
92
93 local_repo.add_lpuser_remote(pkgname=package)
94 logging.debug("added remote '%s' -> %s", local_repo.lp_user,
95 local_repo.raw_repo.remotes[local_repo.lp_user].url
96 )
97 try:
98 local_repo.fetch_lpuser_remote(verbose=True)
99 except GitUbuntuRepositoryFetchError:
100 pass
101
102 try:
103 local_repo.create_tracking_branch(
104 'ubuntu/devel',
105 'pkg/ubuntu/devel'
106 )
107 local_repo.checkout_commitish('ubuntu/devel')
108 except:
109 logging.error('Unable to checkout ubuntu/devel, does '
110 'pkg/ubuntu/devel branch exist?'
111 )
112
113 local_repo.git_run(['config', 'notes.displayRef', 'refs/notes/changelog'])
114
115 if os.path.isfile(os.path.join(directory, '.gitignore')):
116 logging.warning('A .gitignore file exists in the source '
117 'package. This will affect the behavior of git. Consider '
118 'backing up the gitignore while working on this package '
119 'to ensure all changes are tracked or passing appropriate '
120 'flags to git commands (e.g., git status --ignored).'
121 )
122
123 return local_repo
124
125def parse_args(subparsers=None, base_subparsers=None):
126 kwargs = dict(
127 description='Clone a source package git repository to a directory',
128 formatter_class=argparse.RawTextHelpFormatter,
129 epilog='''
130Example:
131 * clone to open-iscsi/
132 %(prog)s open-iscsi
133 * clone to ubuntu.git
134 %(prog)s open-iscsi ubuntu.git
135 * use git rather than https protocol for remotes:
136 %(prog)s --proto=git open-iscsi
137'''
138 )
139 if base_subparsers:
140 kwargs['parents'] = base_subparsers
141 if subparsers:
142 parser = subparsers.add_parser('clone', **kwargs)
143 parser.set_defaults(func=cli_main)
144 else:
145 parser = argparse.ArgumentParser(**kwargs)
146 parser.add_argument('package', type=str,
147 help='Name of source package to clone'
148 )
149 parser.add_argument('directory', type=str,
150 help='Local directory to clone to. If not specified, a '
151 ' directory with the same name as PACKAGE will be '
152 'used',
153 default=None,
154 nargs='?'
155 )
156 parser.add_argument('-l', '--lp-user', type=str, help=argparse.SUPPRESS)
157 if not subparsers:
158 return parser.parse_args()
159 return 'clone - %s' % kwargs['description']
160
161def cli_main(args):
162 try:
163 lp_user = args.lp_user
164 except AttributeError:
165 lp_user = None
166
167 if main(
168 package=args.package,
169 directory=args.directory,
170 lp_user=lp_user,
171 proto=args.proto,
172 ) is not None:
173 return 0
174 return 1
175
176
177# vi: ts=4 expandtab
178>>>>>>> gitubuntu/clone.py
diff --git a/gitubuntu/git_repository.py b/gitubuntu/git_repository.py
new file mode 100644
index 0000000..9a81860
--- /dev/null
+++ b/gitubuntu/git_repository.py
@@ -0,0 +1,3026 @@
1<<<<<<< gitubuntu/git_repository.py
2=======
3### XXX: can we reduce number of calls to dpkg-parsechangelog
4### XXX: is any of this data in lp already?
5
6import collections
7from contextlib import contextmanager
8from copy import copy
9import datetime
10import enum
11from functools import lru_cache
12import itertools
13import logging
14import os
15import posixpath
16import re
17import shutil
18import stat
19from subprocess import CalledProcessError
20import sys
21import tempfile
22from gitubuntu.__main__ import top_level_defaults
23import gitubuntu.build
24from gitubuntu.dsc import component_tarball_matches
25from gitubuntu.patch_state import PatchState
26from gitubuntu.run import (
27 decode_binary,
28 run,
29 runq,
30 run_gbp,
31 run_quilt,
32)
33import gitubuntu.spec
34from gitubuntu.test_util import get_test_changelog
35import gitubuntu.versioning
36import debian.changelog
37import debian.debian_support
38import pygit2
39import pytest
40
41
42def _follow_symlinks_to_blob(repo, top_tree_object, search_path,
43 _rel_tree=None, _rel_path=''
44):
45 '''Recursively follow a path down a tree, following symlinks, to find blob
46
47 repo: pygit2.Repository object
48 top_tree_object: pygit2.Tree object at the top of the tree structure
49 search_path: '/'-separated path string of blob to find
50 _rel_tree: (internal) which tree to look further into
51 _rel_path: (internal) the path we are in so far
52 '''
53
54 NORMAL_BLOB_MODES = set([
55 pygit2.GIT_FILEMODE_BLOB,
56 pygit2.GIT_FILEMODE_BLOB_EXECUTABLE,
57 ])
58
59 _rel_tree = _rel_tree or top_tree_object
60 head, tail = posixpath.split(search_path)
61
62 # A traditional functional split would put a single entry in head with tail
63 # empty, but posixpath.split doesn't necessarily do this. Jiggle it round
64 # to make it appear to have traditional semantics.
65 if not head:
66 head = tail
67 tail = None
68
69 entry = _rel_tree[head]
70 if entry.type in [pygit2.GIT_OBJ_TREE, 'tree']:
71 return _follow_symlinks_to_blob(
72 repo=repo,
73 top_tree_object=top_tree_object,
74 search_path=tail,
75 _rel_tree=repo.get(entry.id),
76 _rel_path=posixpath.join(_rel_path, head),
77 )
78 elif entry.type in [pygit2.GIT_OBJ_BLOB, 'blob'] and entry.filemode == pygit2.GIT_FILEMODE_LINK:
79 # Found a symlink. Start again from the top with adjustment for symlink
80 # following
81 target_tail = [decode_binary(repo.get(entry.id).data)]
82 if tail is not None:
83 target_tail.append(tail)
84 search_path = posixpath.normpath(
85 posixpath.join(_rel_path, *target_tail)
86 )
87 return _follow_symlinks_to_blob(
88 repo=repo,
89 top_tree_object=top_tree_object,
90 search_path=search_path,
91 )
92 elif entry.type in [pygit2.GIT_OBJ_BLOB, 'blob'] and entry.filemode in NORMAL_BLOB_MODES:
93 return repo.get(entry.id)
94 else:
95 # Found some special entry such as a "gitlink" (submodule entry)
96 raise ValueError(
97 "Found %r filemode %r looking for %r" %
98 (entry, entry.filemode, posixpath.join(_rel_path, search_path))
99 )
100
101
102def follow_symlinks_to_blob(repo, treeish_object, path):
103 return _follow_symlinks_to_blob(
104 repo=repo,
105 top_tree_object=treeish_object.peel(pygit2.Tree),
106 search_path=posixpath.normpath(path),
107 )
108
109
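Illustrative sketch (not part of the proposed change): resolving debian/changelog through the wrapper above, assuming the current directory is an imported package checkout.

import pygit2
from gitubuntu.git_repository import follow_symlinks_to_blob

repo = pygit2.Repository('.')  # assumed: an imported source package checkout
head = repo.revparse_single('HEAD')
# Resolves the blob even if 'debian' or 'changelog' is reached via symlinks
blob = follow_symlinks_to_blob(repo, head, 'debian/changelog')
print(blob.data[:80])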
110def _derive_git_cli_env(
111 pygit2_repo,
112 initial_env=None,
113 update_env=None,
114 work_tree_path=None,
115 index_path=None,
116):
117 """Calculate the environment to be used in a call to the git CLI
118
119 :param pygit2.Repository pygit2_repo: the repository for which to calculate
120 the environment
121 :param dict initial_env: the environment to start with
122 :param dict update_env: additional environment settings with which to
123 override the result
124 :param str work_tree_path: in the case of an alternate work tree being
125 used, specify this here and GIT_WORK_TREE will be set to it instead of
126 the default being taken from the work tree used by pygit2_repo
127 :param str index_path: if an alternate index is being used, specify it here
128 and GIT_INDEX_FILE will be set accordingly.
129 :rtype: dict
130 :returns: a dictionary representing the environment with which to call the
131 git CLI
132
133 This function encapsulates the setting of the GIT_DIR, GIT_WORK_TREE and
134 GIT_INDEX_FILE environment variables as necessary. The provided
135 pygit2.Repository instance is used to determine these values. initial_env,
136 if provided, specifies the initial environment to use instead of defaulting
137 to the process' current environment. update_env allows extra environment
138 variables to be added as well as the override of any variables set by this
139 function, including GIT_DIR, GIT_WORK_TREE and GIT_INDEX_FILE.
140 """
141 if initial_env is None:
142 env = os.environ.copy()
143 else:
144 env = initial_env.copy()
145
146 env['GIT_DIR'] = pygit2_repo.path
147
148 if work_tree_path is None:
149 env['GIT_WORK_TREE'] = pygit2_repo.workdir
150 else:
151 env['GIT_WORK_TREE'] = work_tree_path
152
153 if index_path is not None:
154 env['GIT_INDEX_FILE'] = index_path
155
156 if update_env:
157 env.update(update_env)
158
159 return env
160
161
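Illustrative sketch (not part of the proposed change) of the environment this helper derives; the alternate work tree path is hypothetical.

import pygit2
from gitubuntu.git_repository import _derive_git_cli_env

repo = pygit2.Repository('.')  # any existing repository
env = _derive_git_cli_env(
    pygit2_repo=repo,
    update_env={'GIT_TERMINAL_PROMPT': '0'},
    work_tree_path='/tmp/scratch-worktree',  # hypothetical alternate work tree
)
assert env['GIT_DIR'] == repo.path
assert env['GIT_WORK_TREE'] == '/tmp/scratch-worktree'
assert env['GIT_TERMINAL_PROMPT'] == '0'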
162def _derive_target_branch_string(remote_branch_objects):
163 '''Given a list of branch objects, return the name of the one to use as the target branch
164
165 Returns either one of the branch objects' names, or the empty string
166 to indicate no suitable candidate.
167 '''
168 if len(remote_branch_objects) == 0:
169 logging.error("Unable to automatically determine importer "
170 "branch: No candidate branches found."
171 )
172 return ''
173 remote_branch_strings = [
174 b.branch_name for b in remote_branch_objects
175 ]
176 if len(remote_branch_objects) > 1:
177 # do the trees of each branch's tip match?
178 if len(
179 set(b.peel(pygit2.Tree).id for b in remote_branch_objects)
180 ) != 1:
181 logging.error("Unable to automatically determine importer "
182 "branch: Multiple candidate branches found and "
183 "their trees do not match: %s. This might be a "
184 "bug in `git ubuntu lint`, please report it at "
185 "https://bugs.launchpad.net/git-ubuntu. "
186 "Please pass --target-branch.",
187 ", ".join(remote_branch_strings)
188 )
189 return ''
190 # is ubuntu/devel one of the candidates?
191 try:
192 return [
193 b for b in remote_branch_strings if 'ubuntu/devel' in b
194 ].pop()
195 except IndexError:
196 pass
197 # are all candidate branches for the same series?
198 pkg_remote_branch_serieses = set(
199 # remove the prefix, trim the distribution and
200 # extract the series
201 b[len('pkg/'):].split('/')[1].split('-')[0] for
202 b in remote_branch_strings
203 )
204 if len(pkg_remote_branch_serieses) != 1:
205 logging.error("Unable to automatically determine importer "
206 "branch: Multiple candidate branches found and "
207 "they do not target the same series: %s. Please pass "
208 "--target-branch.", ", ".join(remote_branch_strings)
209 )
210 return ''
211 # is a -devel branch present?
212 if not any('-devel' in b for b in remote_branch_strings):
213 logging.error("Unable to automatically determine importer "
214 "branch: Multiple candidate branches found and "
215 "none appear to be a -devel branch: %s. Please "
216 "pass --target-branch.", ", ".join(remote_branch_strings)
217 )
218 return ''
219 # if so, favor -devel
220 remote_branch_strings = [
221 b for b in remote_branch_strings if '-devel' in b
222 ]
223 return remote_branch_strings.pop()
224
225def derive_target_branch(repo, commitish_string, namespace='pkg'):
226 return _derive_target_branch_string(
227 repo.nearest_remote_branches(commitish_string, namespace)
228 )
229
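A sketch (not part of the proposed change) of the selection heuristic above, using stand-in objects that provide only the attributes the helper consults; the branch names and tree ids are hypothetical.

from gitubuntu.git_repository import _derive_target_branch_string

class FakeTree:
    def __init__(self, tree_id):
        self.id = tree_id

class FakeBranch:
    def __init__(self, name, tree_id):
        self.branch_name = name
        self._tree_id = tree_id

    def peel(self, _target_type):
        return FakeTree(self._tree_id)

# Two candidates with identical trees and the same series; the -devel one wins.
branches = [
    FakeBranch('pkg/ubuntu/focal-devel', 'tree-a'),
    FakeBranch('pkg/ubuntu/focal-proposed', 'tree-a'),
]
assert _derive_target_branch_string(branches) == 'pkg/ubuntu/focal-devel'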
230
231def git_run(
232 pygit2_repo,
233 args,
234 initial_env=None,
235 update_env=None,
236 work_tree_path=None,
237 index_path=None,
238 **kwargs
239):
240 """Run the git CLI with the provided arguments
241
242 :param pygit2.Repository pygit2_repo: the repository on which to act
243 :param list(str) args: arguments to the git CLI
244 :param dict initial_env: the environment to use
245 :param dict update_env: additional environment variables and overrides
246 :param dict **kwargs: further arguments to pass through to
247 gitubuntu.run.run()
248 :raises subprocess.CalledProcessError: if git exits non-zero
249 :rtype: (str, str)
250 :returns: stdout and stderr strings containing the subprocess output
251
252 If initial_env is not set, it defaults to the current process' environment.
253
254 The GIT_DIR, GIT_WORK_TREE and GIT_INDEX_FILE environment variables are set
255 automatically as necessary based on the repository's existing location and
256 settings.
257
258 If update_env is set, then the environment to be used is updated with env
259 before the call to git is made. This can override GIT_DIR,
260 GIT_WORK_TREE, GIT_INDEX_FILE and anything else.
261 """
262 env = _derive_git_cli_env(
263 pygit2_repo=pygit2_repo,
264 initial_env=initial_env,
265 update_env=update_env,
266 work_tree_path=work_tree_path,
267 index_path=index_path,
268 )
269 return run(['git'] + list(args), env=env, **kwargs)
270
271
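A corresponding usage sketch (not part of the proposed change) for the module-level wrapper; per its docstring, a non-zero git exit raises subprocess.CalledProcessError.

import pygit2
from gitubuntu.git_repository import git_run

repo = pygit2.Repository('.')
stdout, stderr = git_run(repo, ['status', '--porcelain'])
print(stdout)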
272class RenameableDir:
273 """An on-disk directory that can be renamed and traversed recursively.
274
275 This is a thin wrapper around a filesystem path string (and must be
276 instantiated with one). Methods and attributes are modeled around a
277 py.path, but we do not use py.path as we don't really need its
278 functionality and it would add another dependency. This interface allows
279 for filesystem operations to be easily faked with a FakeRenameableDir for
280 testing consumers of this class.
281
282 One wart around renaming and py.path is that once renamed a py.path object
283 becomes useless as it no longer validly refers to an on-disk path. Rather
284 than supporting a rename method, this wrapper provides a basename
285 setter that handles the rename and the replacement of the wrapped string
286 object transparently. This moves complexity away from the class consumer, allowing
287 the consumer to be tested more easily.
288
289 Since the underlying purpose of this class is to handle manipulations of a
290 directory tree for adjustments needed during import/export, symlink
291 handling is effectively "turned off" in the specification of this class.
292 Symlinks to directories are not recursed into; they are handled no
293 differently to a regular file, in the same manner as lstat(2).
294 """
295 def __init__(self, path):
296 """Create a new RenameableDir instance.
297
298 :param str path: the on-disk directory to wrap, which must exist. For
299 symlinks, it is the symlink itself that must exist; the existence
300 of a symlink's target does not matter.
301 :raises FileNotFoundError: if the path supplied does not exist.
302 """
303 # Ignore the return value of os.lstat(); the call is only used to raise
304 # FileNotFoundError if the path does not exist (as required by the
305 # docstring), and to succeed otherwise. A call to
306 # os.path.lexists() would use the same underlying system call
307 # anyway, so this is equivalent and this way we end up with a full
308 # FileNotFoundError exception created for us with all the correct
309 # parameters.
310 os.lstat(path)
311
312 self._path = path
313
314 @property
315 def basename(self):
316 """The name of the directory itself."""
317 return os.path.basename(self._path)
318
319 @basename.setter
320 def basename(self, new_basename):
321 """Rename this directory."""
322 renamed_path = os.path.join(os.path.dirname(self._path), new_basename)
323 os.rename(self._path, renamed_path)
324 self._path = renamed_path
325
326 def listdir(self, fil=lambda x: True):
327 """Return subdirectory objects.
328
329 :param fil: a function that, given a basename, returns a boolean
330 indicating whether or not the corresponding object should be
331 returned in the results.
332 """
333 return [
334 RenameableDir(os.path.join(self._path, p))
335 for p in os.listdir(self._path)
336 if fil(p)
337 ]
338
339 @property
340 def recursive(self):
341 """Indicate if this object can contain subdirectory objects.
342
343 An object representing a file will return False. An object representing
344 a directory will return True, even if it is empty.
345
346 Symlinks return False even if they point to a directory. Broken
347 symlinks also always return False.
348
349 :rtype: bool
350 """
351 st = os.stat(self._path, follow_symlinks=False)
352 return stat.S_ISDIR(st.st_mode)
353
354 def __str__(self):
355 return str(self._path)
356
357 def __repr__(self):
358 return 'RenameableDir(%r)' % str(self)
359
360 def __hash__(self):
361 # https://stackoverflow.com/q/2909106/478206
362 return hash((
363 type(self),
364 self._path
365 ))
366
367 def __eq__(self, other):
368 return hash(self) == hash(other)
369
370
371class FakeRenameableDir:
372 """A fake RenameDir that retains its structure in memory.
373
374 This is useful for testing consumers of a RenameableDir.
375
376 In addition, renames are recorded and those records passed up to parent
377 FakeRenameableDir objects so that the order of renames that occur can be
378 checked later.
379 """
380 def __init__(self, basename, subdirs):
381 """Create a new RenameableDir instance.
382
383 :param str basename: the basename of this instance.
384 :param subdirs: FakeRenameableDir objects contained within this one.
385 For non-recursive objects (such as those intended to represent
386 files), use None.
387 :type subdirs: list(FakeRenameableDir)
388 """
389 self._basename = basename
390 self._subdirs = subdirs
391
392 if self._subdirs:
393 for subdir in self._subdirs:
394 subdir._parent = self
395
396 self._parent = None
397 self._rename_record = []
398
399 @property
400 def basename(self):
401 return self._basename
402
403 @basename.setter
404 def basename(self, new_basename):
405 self._record_rename(self)
406 self._basename = new_basename
407
408 def _record_rename(self, obj):
409 self._rename_record.append(obj)
410 if self._parent:
411 self._parent._record_rename(obj)
412
413 def listdir(self, fil=lambda x: True):
414 return (subdir for subdir in self._subdirs if fil(subdir.basename))
415
416 @property
417 def recursive(self):
418 return self._subdirs is not None
419
420 def __hash__(self):
421 # https://stackoverflow.com/q/2909106/478206
422 return hash((
423 type(self),
424 self.basename,
425 None if self._subdirs is None else tuple(self._subdirs),
426 ))
427
428 def __eq__(self, other):
429 return hash(self) == hash(other)
430
431 def __repr__(self):
432 return 'FakeRenameableDir(%r, %r)' % (self.basename, self._subdirs)
433
434
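As an illustration (not part of the proposed change), a test might drive the fake like this, checking that a rename is recorded on the parent; the layout is hypothetical, and _rename_record is the internal record described in the docstring above.

from gitubuntu.git_repository import FakeRenameableDir

fake_tree = FakeRenameableDir('top', [
    FakeRenameableDir('.git', []),      # a directory (recursive)
    FakeRenameableDir('README', None),  # a file (not recursive)
])
dot_git = next(d for d in fake_tree.listdir() if d.basename == '.git')
dot_git.basename = '..git'              # rename, as escape_dot_git() would do
assert dot_git.basename == '..git'
assert fake_tree._rename_record == [dot_git]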
435_dot_git_match = re.compile(r'^\.+git$').search
436_EscapeDirection = enum.Enum('EscapeDirection', ['ESCAPE', 'UNESCAPE'])
437
438
439def _escape_unescape_dot_git(path, direction):
440 """Escape or unescape .git entries in a directory recursively.
441
442 :param RenameableDir path: top of directory tree to escape or unescape.
443 :param _EscapeDirection direction: whether to escape or unescape.
444
445 Escaping rules:
446 .git -> ..git
447 ..git -> ...git
448 ...git -> ....git
449 etc.
450
451 All these escaping rules apply all of the time, regardless of whether
452 or not .git exists. Only names matching '.git' with zero or more '.'
453 prepended are touched.
454
455 This allows any directory tree to be losslessly stored in git, since git
456 does not permit entries named '.git'.
457
458 Unescaping is the inverse of escaping. Before unescaping, an entry called
459 '.git' must not exist. If it does, RuntimeError is raised, and the
460 directory is left in an undefined (probably partially unescaped) state.
461 """
462 # When escaping, we have to rename ..git to ...git before renaming .git to
463 # ..git in order to make room, and the reverse for unescaping. If we do the
464 # renames ordered by length of name, we can meet this requirement.
465 # Escaping: order by longest first; unescaping: order by shortest first.
466 sorted_subpaths_to_rename = sorted(
467 path.listdir(fil=_dot_git_match),
468 key=lambda p: len(p.basename),
469 reverse=direction is _EscapeDirection.ESCAPE,
470 )
471 for entry in sorted_subpaths_to_rename:
472 if direction is _EscapeDirection.ESCAPE:
473 # Add a leading '.'
474 entry.basename = '.' + entry.basename
475 else:
476 assert direction is _EscapeDirection.UNESCAPE
477 if entry.basename == '.git':
478 raise RuntimeError(
479 "%s exists but is invalid when unescaping" % entry,
480 )
481 # Drop the leading '.'
482 assert entry.basename[0] == '.'
483 entry.basename = entry.basename[1:]
484
485 # Traverse the entire directory for recursive escapes;
486 # sorted_subpaths_to_rename is already filtered so is not complete by
487 # itself
488 for entry in path.listdir():
489 if entry.recursive:
490 _escape_unescape_dot_git(entry, direction=direction)
491
492
493def escape_dot_git(path):
494 """Apply .git escaping to a filesystem path.
495
496 :param str path: path to filesystem to change
497 """
498 return _escape_unescape_dot_git(
499 path=RenameableDir(path),
500 direction=_EscapeDirection.ESCAPE,
501 )
502
503
504def unescape_dot_git(path):
505 """Unapply .git escaping to a filesystem path.
506
507 :param str path: path to filesystem to change
508
509 Any entry (including recursively) called '.git' in path is an error and
510 will raise a RuntimeError. If an exception is raised, path may be left in a
511 partially unescaped state.
512 """
513 return _escape_unescape_dot_git(
514 path=RenameableDir(path),
515 direction=_EscapeDirection.UNESCAPE,
516 )
517
518
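For illustration (not part of the proposed change), a round trip of the escaping rules on a scratch directory:

import os
import tempfile
from gitubuntu.git_repository import escape_dot_git, unescape_dot_git

top = tempfile.mkdtemp()
os.mkdir(os.path.join(top, '.git'))
os.mkdir(os.path.join(top, '..git'))

escape_dot_git(top)    # .git -> ..git and ..git -> ...git (longest renamed first)
assert sorted(os.listdir(top)) == ['...git', '..git']

unescape_dot_git(top)  # the exact inverse (shortest renamed first)
assert sorted(os.listdir(top)) == ['..git', '.git']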
519class ChangelogError(Exception):
520 pass
521
522class Changelog:
523 '''Representation of a debian/changelog file found inside a git tree-ish
524
525 Uses python-debian for parsing, cross-checked against dpkg-parsechangelog;
526 when these fail we fall back to grep/sed-based pattern matching automatically.
527 '''
528 def __init__(self, content_bytes):
529 '''
530 contents: bytes string of file contents
531 '''
532 self._contents = content_bytes
533 try:
534 self._changelog = debian.changelog.Changelog(
535 self._contents,
536 strict=True
537 )
538 if not len(self._changelog.versions):
539 # assume bad read, so fall back to shell later
540 self._changelog = None
541 except (
542 UnicodeDecodeError,
543 ValueError,
544 debian.changelog.ChangelogParseError
545 ):
546 self._changelog = None
547
548 @classmethod
549 def from_treeish(cls, repo, treeish_object):
550 '''
551 repo: pygit2.Repository instance
552 treeish_object: pygit2.Object subclass instance (must peel to pygit2.Tree)
553 '''
554 blob = follow_symlinks_to_blob(
555 repo=repo,
556 treeish_object=treeish_object,
557 path='debian/changelog'
558 )
559 return cls(blob.data)
560
561 @classmethod
562 def from_path(cls, path):
563 with open(path, 'rb') as f:
564 return cls(f.read())
565
566 @lru_cache()
567 def _dpkg_parsechangelog(self, parse_params):
568 stdout, _ = run(
569 'dpkg-parsechangelog -l- %s' % parse_params,
570 input=self._contents,
571 shell=True,
572 verbose_on_failure=False,
573 )
574 return stdout.strip()
575
576 @lru_cache()
577 def _shell(self, cmd):
578 stdout, _ = run(
579 cmd,
580 input=self._contents,
581 shell=True,
582 verbose_on_failure=False,
583 )
584 return stdout.strip()
585
586 @property
587 def _shell_version(self):
588 parse_params = '-n1 -SVersion'
589 shell_cmd = "grep -m1 '^\\S' | sed 's/.*(\\(.*\\)).*/\\1/'"
590 try:
591 raw_out = self._dpkg_parsechangelog(parse_params)
592 except CalledProcessError:
593 raw_out = self._shell(shell_cmd)
594 return None if raw_out == '' else raw_out
595
596 @property
597 def upstream_version(self):
598 if self._changelog:
599 return self._changelog.upstream_version
600 version = self._shell_version
601 m = debian.debian_support.Version.re_valid_version.match(version)
602 if m is None:
603 raise ValueError("Invalid version string: %s", version)
604 return m.group('upstream_version')
605
606 @property
607 def version(self):
608 if self._changelog:
609 try:
610 ret = str(self._changelog.versions[0]).strip()
611 shell_version = self._shell_version
612 if shell_version != 'unknown' and ret != shell_version:
613 raise ChangelogError(
614 'Old (%s) and new (%s) changelog values do not agree' %
615 (self._shell_version, ret)
616 )
617 return ret
618 except IndexError:
619 return None
620 return self._shell_version
621
622 @property
623 def _shell_previous_version(self):
624 parse_params = '-n1 -o1 -SVersion'
625 shell_cmd = "grep -m1 '^\\S' | tail -1 | sed 's/.*(\\(.*\\)).*/\\1/'"
626 try:
627 raw_out = self._dpkg_parsechangelog(parse_params)
628 except CalledProcessError:
629 raw_out = self._shell(shell_cmd)
630 return None if raw_out == '' else raw_out
631
632 @property
633 def previous_version(self):
634 if self._changelog:
635 try:
636 ret = str(self._changelog.versions[1]).strip()
637 if ret != self._shell_previous_version:
638 raise ChangelogError(
639 'Old (%s) and new (%s) changelog values do not agree' %
640 (self._shell_previous_version, ret)
641 )
642 return ret
643 except IndexError:
644 return None
645 return self._shell_previous_version
646
647 @property
648 def _shell_maintainer(self):
649 parse_params = '-SMaintainer'
650 shell_cmd = "grep -m1 '^ --' | sed 's/ -- \\(.*\\) \\(.*\\)/\\1/'"
651 try:
652 return self._dpkg_parsechangelog(parse_params)
653 except CalledProcessError:
654 return self._shell(shell_cmd)
655
656 @property
657 def maintainer(self):
658 if self._changelog:
659 ret = self._changelog.author.strip()
660 if ret != self._shell_maintainer:
661 raise ChangelogError(
662 'Old (%s) and new (%s) changelog values do not agree' %
663 (self._shell_maintainer, ret)
664 )
665 else:
666 ret = self._shell_maintainer
667 if not ret:
668 raise ValueError("Unable to parse maintainer from changelog")
669 return ret
670
671 @property
672 def _shell_date(self):
673 parse_params = '-SDate'
674 shell_cmd = "grep -m1 '^ --' | sed 's/ -- \\(.*\\) \\(.*\\)/\\2/'"
675 try:
676 return self._dpkg_parsechangelog(parse_params)
677 except CalledProcessError:
678 return self._shell(shell_cmd)
679
680 @property
681 def date(self):
682 if self._changelog:
683 ret = self._changelog.date.strip()
684 if ret != self._shell_date:
685 raise ChangelogError(
686 'Old (%s) and new (%s) changelog values do not agree' %
687 (self._shell_date, ret)
688 )
689 return ret
690 return self._shell_date
691
692 @property
693 def _shell_all_versions(self):
694 parse_params = '--format rfc822 -SVersion --all'
695 shell_cmd = "grep '^\\S' | sed 's/.*(\\(.*\\)).*/\\1/'"
696 try:
697 version_lines = self._dpkg_parsechangelog(parse_params)
698 except CalledProcessError:
699 version_lines = self._shell(shell_cmd)
700 return [
701 v_stripped
702 for v_stripped in (
703 v.strip() for v in version_lines.splitlines()
704 )
705 if v_stripped
706 ]
707
708 @property
709 def all_versions(self):
710 if self._changelog:
711 ret = [str(v).strip() for v in self._changelog.versions]
712 shell_all_versions = self._shell_all_versions
713 is_equivalent = (
714 len(ret) == len(shell_all_versions) and
715 all(
716 shell_version == 'unknown' or shell_version == api_version
717 for shell_version, api_version
718 in zip(shell_all_versions, ret)
719 )
720 )
721 if not is_equivalent:
722 raise ChangelogError(
723 "Old and new changelog values do not agree"
724 )
725 return ret
726 else:
727 return self._shell_all_versions
728
729 @property
730 def _shell_distribution(self):
731 parse_params = '-SDistribution'
732 shell_cmd = "grep -m1 '^\\S' | sed 's/.*\\ .*\\ \\(.*\\);.*/\\1/'"
733 try:
734 return self._dpkg_parsechangelog(parse_params)
735 except CalledProcessError:
736 return self._shell(shell_cmd)
737
738 @property
739 def distribution(self):
740 if self._changelog:
741 ret = self._changelog.distributions
742 if ret != self._shell_distribution:
743 raise ChangelogError(
744 'Old (%s) and new (%s) changelog values do not agree' %
745 (self._shell_distribution, ret)
746 )
747 return ret
748 return self._shell_distribution
749
750 @property
751 def _shell_srcpkg(self):
752 parse_params = '-SSource'
753 shell_cmd = "grep -m1 '^\\S' | sed 's/\\(.*\\)\\ .*\\ .*;.*/\\1/'"
754 try:
755 return self._dpkg_parsechangelog(parse_params)
756 except CalledProcessError:
757 return self._shell(shell_cmd)
758
759 @property
760 def srcpkg(self):
761 if self._changelog:
762 ret = self._changelog.package.strip()
763 if ret != self._shell_srcpkg:
764 raise ChangelogError(
765 'Old (%s) and new (%s) changelog values do not agree' %
766 (self._shell_srcpkg, ret)
767 )
768 return ret
769 return self._shell_srcpkg
770
771 @staticmethod
772 def _parse_changelog_date(changelog_timestamp_string):
773 """Convert changelog timestamp into datetime object
774
775 This function currently requires the locale to have been set to C.UTF-8
776 by the caller. This would typically be done at the main entry point to
777 the importer.
778
779 :param str changelog_timestamp_string: the timestamp part of the
780 signoff line from a changelog entry
781 :rtype: datetime.datetime
782 :returns: the timestamp as a datetime object
783 :raises ValueError: if the string could not be parsed
784 """
785 # We avoid using something like dateutil.parser here because the
786 # parsing behaviour of malformed or unusually formatted dates must be
787 # precisely as specified and not ever change behaviour. If it did, then
788 # imports would no longer be reproducible.
789 #
790 # However, adding a new form of parsing for an unambiguous date is
791 # acceptable if the spec is first updated accordingly since that would
792 # only introduce new imports that would have previously failed.
793 #
794 # time.strptime ignores time zones, so we must use datetime.strptime()
795
796 # strptime doesn't support anything other than standard locale names
797 # for days of the week, so handle the "Thur" abbreviation as a special
798 # case, as defined in the spec, since it is unambiguous.
799 adjusted_changelog_timestamp_string = re.sub(
800 r'^Thur,',
801 'Thu,',
802 changelog_timestamp_string,
803 )
804
805 acceptable_date_formats = [
806 '%a, %d %b %Y %H:%M:%S %z', # standard
807 '%A, %d %b %Y %H:%M:%S %z', # full day of week
808 '%d %b %Y %H:%M:%S %z', # missing day of week
809 '%a, %d %B %Y %H:%M:%S %z', # full month name
810 '%A, %d %B %Y %H:%M:%S %z', # full day of week and month name
811 '%d %B %Y %H:%M:%S %z', # missing day of week with full month
812 # name
813 ]
814 for date_format in acceptable_date_formats:
815 try:
816 return datetime.datetime.strptime(
817 adjusted_changelog_timestamp_string,
818 date_format,
819 )
820 except ValueError:
821 pass
822 else:
823 raise ValueError(
824 "Could not parse date %r" % changelog_timestamp_string,
825 )
826
827 def git_authorship(self, author_date=None):
828 """Extract last changelog entry's maintainer and timestamp
829
830 Parse the first changelog entry's sign-off line into git's commit
831 authorship metadata model according to the import specification.
832
833 :param datetime.datetime author_date: overrides the author date
834 normally parsed from the changelog entry (i.e. for handling date
835 parsing edge cases). Any sub-second part of the timestamp is
836 truncated.
837 :rtype: tuple(str, str, int, int)
838 :returns: tuple of name, email, time (in seconds since epoch) and
839 offset from UTC (in minutes)
840 :raises ValueError: if the changelog sign-off line cannot be parsed
841 """
842 m = re.match(r'(?P<name>.*)<+(?P<email>.*?)>+', self.maintainer)
843 if m is None:
844 raise ValueError('Cannot get authorship')
845
846 author_epoch_seconds, author_tz_offset = datetime_to_signature_spec(
847 self._parse_changelog_date(self.date)
848 if author_date is None
849 else author_date
850 )
851
852 return (
853 # If the author name is empty, then it must be
854 # EMPTY_GIT_AUTHOR_NAME because git will not accept an empty author
855 # name. See the specification for details.
856 (
857 m.group('name').strip()
858 or gitubuntu.spec.EMPTY_GIT_AUTHOR_NAME
859 ),
860 m.group('email'),
861 author_epoch_seconds,
862 author_tz_offset,
863 )
864
865
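A usage sketch for the class above (not part of the proposed change). It assumes dpkg-dev is installed, since the properties cross-check against dpkg-parsechangelog, and a C/C.UTF-8 locale for the date parsing in git_authorship(); the changelog entry itself is hypothetical.

from gitubuntu.git_repository import Changelog

entry = (
    b'hello (2.10-1ubuntu1) focal; urgency=medium\n'
    b'\n'
    b'  * Example change.\n'
    b'\n'
    b' -- Jane Doe <jane@example.com>  Thu, 02 Jan 2020 12:00:00 +0000\n'
)
changelog = Changelog(entry)
assert changelog.srcpkg == 'hello'
assert changelog.version == '2.10-1ubuntu1'
assert changelog.distribution == 'focal'
name, email, seconds, tz_offset = changelog.git_authorship()
assert (name, email, tz_offset) == ('Jane Doe', 'jane@example.com', 0)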
866class GitUbuntuChangelogError(Exception):
867 pass
868
869class PristineTarError(Exception):
870 pass
871
872class PristineTarNotFoundError(PristineTarError):
873 pass
874
875class MultiplePristineTarFoundError(PristineTarError):
876 pass
877
878
879def git_dep14_tag(version):
880 """Munge a version string according to http://dep.debian.net/deps/dep14/"""
881 version = str(version)
882 version = version.replace('~', '_')
883 version = version.replace(':', '%')
884 version = version.replace('..', '.#.')
885 if version.endswith('.'):
886 version = version + '#'
887 if version.endswith('.lock'):
888 pre, _, _ = version.partition('.lock')
889 version = pre + '.#lock'
890 return version
891
892def import_tag(version, namespace, patch_state=PatchState.UNAPPLIED):
893 return '%s/%s/%s' % (
894 namespace,
895 {
896 PatchState.UNAPPLIED: 'import',
897 PatchState.APPLIED: 'applied',
898 }[patch_state],
899 git_dep14_tag(version),
900 )
901
902def reimport_tag_prefix(version, namespace, patch_state=PatchState.UNAPPLIED):
903 return '%s/reimport/%s/%s' % (
904 namespace,
905 {
906 PatchState.UNAPPLIED: 'import',
907 PatchState.APPLIED: 'applied',
908 }[patch_state],
909 git_dep14_tag(version),
910 )
911
912def reimport_tag(
913 version,
914 namespace,
915 reimport,
916 patch_state=PatchState.UNAPPLIED,
917):
918 return '%s/%s' % (
919 reimport_tag_prefix(version, namespace, patch_state=patch_state),
920 reimport,
921 )
922
923def upload_tag(version, namespace):
924 return '%s/upload/%s' % (namespace, git_dep14_tag(version))
925
926def upstream_tag(version, namespace):
927 return '%s/upstream/%s' % (namespace, git_dep14_tag(version))
928
929def orphan_tag(version, namespace):
930 return '%s/orphan/%s' % (namespace, git_dep14_tag(version))
931
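Illustrative values for the tag helpers above (not part of the proposed change):

from gitubuntu.git_repository import git_dep14_tag, import_tag, upload_tag

assert git_dep14_tag('1:2.0~rc1-1ubuntu1') == '1%2.0_rc1-1ubuntu1'
assert import_tag('1:2.0~rc1-1ubuntu1', 'pkg') == 'pkg/import/1%2.0_rc1-1ubuntu1'
assert upload_tag('2.0-1ubuntu2', 'pkg') == 'pkg/upload/2.0-1ubuntu2'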
932def is_dir_3_0_quilt(_dir=None):
933 _dir = _dir if _dir else '.'
934 try:
935 fmt, _ = run(['dpkg-source', '--print-format', _dir])
936 if '3.0 (quilt)' in fmt:
937 return True
938 except CalledProcessError as e:
939 try:
940 with open(os.path.join(_dir, 'debian/source/format'), 'r') as f:
941 for line in f:
942 if re.match(r'3\.0 \(quilt\)', line):
943 return True
944 # `man dpkg-source` indicates no d/s/format implies 1.0
945 except OSError:
946 pass
947
948 return False
949
950def is_3_0_quilt(repo, commitish='HEAD'):
951 with repo.temporary_worktree(commitish):
952 return is_dir_3_0_quilt()
953
954class GitUbuntuRepositoryFetchError(Exception):
955 pass
956
957
958def determine_quilt_series_path(pygit2_repo, treeish_obj):
959 """Find the active quilt series file path in use.
960
961 Look in the given tree for the Debian patch series file that is active
962 according to the search algorithm described in dpkg-source(1). If none are
963 found, return the default series path (again from dpkg-source(1)).
964
965 :param pygit2.Repository pygit2_repo: repository to look in.
966 :param pygit2.Object treeish_obj: object that peels to a pygit2.Tree.
967 :returns: relative path to series file.
968 :rtype: str
969 """
970 for series_name in ['debian.series', 'series']:
971 try:
972 series_path = posixpath.join('debian/patches', series_name)
973 blob = follow_symlinks_to_blob(
974 repo=pygit2_repo,
975 treeish_object=treeish_obj,
976 path=series_path,
977 )
978 except KeyError:
979 continue # try the next path using our search list
980 return series_path # series file blob found at this path
981
982 logging.debug("Unable to find a series file in %r", treeish_obj)
983 return 'debian/patches/series' # default when no series file found
984
985
986def quilt_env(pygit2_repo, treeish):
987 """Find the appropriate quilt environment to use.
988
989 Return the canonical environment that should be used when calling quilt.
990 Since the series file doesn't necessarily always have the same name, a
991 source tree is examined to determine the name and set QUILT_SERIES
992 appropriately.
993
994 This does not integrate any other environment variables. Only environment
995 variables that influence quilt are returned.
996
997 :param pygit2.Repository pygit2_repo: repository to look in.
998 :param pygit2.Object treeish: object that peels to a pygit2.Tree.
999 :returns: quilt-specific environment settings
1000 :rtype: dict
1001 """
1002 return {
1003 'QUILT_PATCHES': 'debian/patches',
1004 'QUILT_SERIES': determine_quilt_series_path(pygit2_repo, treeish),
1005 'QUILT_NO_DIFF_INDEX': '1',
1006 'QUILT_NO_DIFF_TIMESTAMPS': '1',
1007 'EDITOR': 'true',
1008 }
1009
1010
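A sketch of consuming the quilt environment above (not part of the proposed change); the checkout is hypothetical.

import pygit2
from gitubuntu.git_repository import quilt_env

repo = pygit2.Repository('.')  # assumed: a package checkout
head = repo.revparse_single('HEAD')
env = quilt_env(repo, head)
assert env['QUILT_PATCHES'] == 'debian/patches'
# QUILT_SERIES is 'debian/patches/series' unless the tree carries a
# 'debian/patches/debian.series' file, which takes precedence.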
1011def datetime_to_signature_spec(datetime):
1012 """Convert a datetime to the time and offset required by a pygit2.Signature
1013
1014 :param datetime datetime: the timezone-aware datetime to convert
1015 :rtype: tuple(int, int)
1016 :returns: the time since epoch and timezone offset in minutes as suitable
1017 for passing to the pygit2.Signature constructor parameters time and
1018 offset.
1019 """
1020 # Divide by 60 for seconds -> minutes
1021 offset_td = datetime.utcoffset()
1022 offset_mins = (
1023 int(offset_td.total_seconds()) // 60
1024 if offset_td
1025 else 0
1026 )
1027
1028 return int(datetime.timestamp()), offset_mins
1029
1030
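For example (illustrative, not part of the proposed change):

import datetime
from gitubuntu.git_repository import datetime_to_signature_spec

ist = datetime.timezone(datetime.timedelta(hours=5, minutes=30))
when = datetime.datetime(2020, 1, 2, 12, 0, 0, tzinfo=ist)
seconds, offset = datetime_to_signature_spec(when)
assert offset == 330                     # +05:30 expressed in minutes
assert seconds == int(when.timestamp())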
1031class HeadInfoItem(collections.namedtuple(
1032 'HeadInfoItem',
1033 [
1034 'version',
1035 'commit_time',
1036 'commit_id',
1037 ],
1038)):
1039 """Information associated with a single branch head
1040
1041 :ivar str version: the package version found in debian/changelog at the
1042 branch head.
1043 :ivar int commit_time: the timestamp of the commit at the branch head,
1044 expressed as seconds since the Unix epoch.
1045 :ivar pygit2.Oid commit_id: the hash of the commit at the branch head.
1046 """
1047 pass
1048
1049
1050class GitUbuntuRepository:
1051 """A class for interacting with an importer git repository
1052
1053 This class attempts to put all objects it manipulates in an
1054 'importer/' namespace. It also uses tags in one of three namespaces:
1055 'import/' for successfully imported published versions (these are
1056 created by the importer); 'upload/' for uploaded versions by Ubuntu
1057 developers (these are understood by the importer and are aliased by
1058 import/ tags when successfully imported); and 'orphan/' for published
1059 versions for which no parents can be found (these are also created
1060 by the importer).
1061
1062 To access the underlying pygit2.Repository object, use the raw_repo
1063 property.
1064 """
1065
1066 def __init__(
1067 self,
1068 local_dir,
1069 lp_user=None,
1070 fetch_proto=None,
1071 delete_on_close=True,
1072 ):
1073 """
1074 If fetch_proto is None, the default value from
1075 gitubuntu.__main__ will be used (top_level_defaults.proto).
1076 """
1077 if local_dir is None:
1078 self._local_dir = tempfile.mkdtemp()
1079 else:
1080 local_dir = os.path.abspath(local_dir)
1081 try:
1082 os.mkdir(local_dir)
1083 except FileExistsError:
1084 local_dir_list = os.listdir(local_dir)
1085 if local_dir_list and os.getenv(
1086 'GIT_DIR',
1087 '.git',
1088 ) not in local_dir_list:
1089 logging.error('Specified directory %s must either '
1090 'be empty or have been previously '
1091 'imported to.', local_dir)
1092 sys.exit(1)
1093 self._local_dir = local_dir
1094
1095 self.raw_repo = pygit2.init_repository(self._local_dir)
1096 # We rely on raw_repo.workdir to be identical to self._local_dir to
1097 # avoid changing previous behaviour in the setting of GIT_WORK_TREE, so
1098 # assert that it is so. This may not be the case if the git repository
1099 # has a different workdir stored in its configuration or if the git
1100 # repository is a bare repository. We didn't handle these cases before
1101 # anyway, so with this assertion we can fail noisily and early.
1102 assert (
1103 os.path.normpath(self.raw_repo.workdir) ==
1104 os.path.normpath(self._local_dir)
1105 )
1106
1107 # Since previous behaviour of this class depended on the state of the
1108 # environment at the time it was constructed, save this for later use
1109 # (for example in deriving the environment to use for calls to the git
1110 # CLI). This permits the behaviour to remain identical for now.
1111 # Eventually we can break previous behaviour and eliminate the need for
1112 # this. See also: gitubuntu.test_fixtures.repo; the handling of EMAIL
1113 # there could be made cleaner when this is cleaned up.
1114 self._initial_env = os.environ.copy()
1115
1116 self.set_git_attributes()
1117
1118 if lp_user:
1119 self._lp_user = lp_user
1120 else:
1121 try:
1122 self._lp_user, _ = self.git_run(
1123 ['config', 'gitubuntu.lpuser'],
1124 verbose_on_failure=False,
1125 )
1126 self._lp_user = self._lp_user.strip()
1127 except CalledProcessError:
1128 self._lp_user = None
1129
1130 if fetch_proto is None:
1131 fetch_proto = top_level_defaults.proto
1132
1133 self._fetch_proto = fetch_proto
1134 self._delete_on_close = delete_on_close
1135
1136 def close(self):
1137 """Free resources associated with this instance
1138
1139 If delete_on_close was True on instance construction, local_dir (as
1140 specified on instance construction) will be deleted.
1141
1142 After this method is called, the instance is invalid and can no longer
1143 be used.
1144 """
1145 if self.raw_repo and self._delete_on_close:
1146 shutil.rmtree(self.local_dir)
1147 self.raw_repo = None
1148
1149 def create_orphan_branch(self, branch_name, msg):
1150 if self.get_head_by_name(branch_name) is None:
1151 self.git_run(['checkout', '--orphan', branch_name])
1152 self.git_run(['commit', '--allow-empty', '-m', msg])
1153 self.git_run(['checkout', '--orphan', 'master'])
1154
1155 @contextmanager
1156 def pristine_tar_branches(self, dist, namespace='pkg', create=True):
1157 """Context manager wrapping pristine-tar branch manipulation
1158
1159 In this context, the repository pristine-tar branch will point to
1160 the pristine-tar branch for @dist distribution in @namespace.
1161
1162 Because of our model, the distribution-pristine-tar branch may
1163 be a local branch (import-time) or a remote-tracking branch
1164 (build-time) and we need different behavior in both cases.
1165 Specifically, we want to affect the local branch's contents, but
1166 we cannot do that to a remote-tracking branch.
1167
1168 Upon entry to the context, detect the former case (by doing a
1169 local only lookup first) and doing a branch rename there.
1170 Otherwise, create a new local branch.
1171
1172 Upon exit, if a local branch had been found, rename pristine-tar
1173 back to the original name. Otherwise, simply delete the created
1174 pristine-tar branch.
1175
1176 If a local branch named pristine-tar existed outside this
1177 context, it will be restored upon leaving the context.
1178
1179 :param dist str One of 'ubuntu' or 'debian'
1180 :param namespace str Namespace under which Git refs are found
1181 :param create bool If an appropriate local pristine-tar Git
1182 branch does not exist, create one using the above algorithm.
1183 """
1184 pt_branch = '%s/importer/%s/pristine-tar' % (namespace, dist)
1185 old_pt_branch = self.raw_repo.lookup_branch('pristine-tar')
1186 old_pt_branch_commit = None
1187 if old_pt_branch:
1188 old_pt_branch_commit = old_pt_branch.peel(pygit2.Commit)
1189 old_pt_branch.delete()
1190 local_pt_branch = self.raw_repo.lookup_branch(pt_branch)
1191 remote_pt_branch = self.raw_repo.lookup_branch(
1192 pt_branch,
1193 pygit2.GIT_BRANCH_REMOTE,
1194 )
1195 if local_pt_branch:
1196 local_pt_branch.rename('pristine-tar')
1197 elif remote_pt_branch:
1198 self.raw_repo.create_branch(
1199 'pristine-tar',
1200 remote_pt_branch.peel(pygit2.Commit),
1201 )
1202 elif create:
1203 # This should only be possible when importing and the first
1204 # pristine-tar usage, create an orphan branch at the local
1205 # pt branch location and flag it for cleanup
1206 local_pt_branch = True
1207 self.create_orphan_branch(
1208 'pristine-tar',
1209 'Initial %s pristine-tar branch.' % dist,
1210 )
1211 if not self.raw_repo.lookup_branch('do-not-push'):
1212 self.create_orphan_branch(
1213 'do-not-push',
1214 'Initial upstream branch.',
1215 )
1216 try:
1217 yield
1218 except:
1219 raise
1220 finally:
1221 if local_pt_branch: # or create above
1222 self.raw_repo.lookup_branch('pristine-tar').rename(pt_branch)
1223 elif remote_pt_branch:
1224 self.raw_repo.lookup_branch('pristine-tar').delete()
1225 if old_pt_branch_commit:
1226 self.raw_repo.create_branch(
1227 'pristine-tar',
1228 old_pt_branch_commit,
1229 )
1230
1231 def pristine_tar_list(self, dist, namespace='pkg'):
1232 """List tarballs stored in pristine-tar branch for @dist distribution in @namespace.
1233
1234 If there is no pristine-tar branch, `pristine-tar list` returns
1235 nothing.
1236
1237 :param dist str One of 'ubuntu' or 'debian'
1238 :param namespace str Namespace under which Git refs are found
1239 :rtype list(str)
1240 :returns List of orig tarball names stored in the pristine-tar
1241 branches
1242 """
1243 with self.pristine_tar_branches(dist, namespace, create=False):
1244 stdout, _ = run(['pristine-tar', 'list'])
1245 return stdout.strip().splitlines()
1246
1247 def pristine_tar_extract(self, pkgname, version, dist=None, namespace='pkg'):
1248 '''Extract orig tarballs for a given package and upstream version
1249
1250 This function will fail if the expected tarballs are already
1251 present by name in the parent directory. If, at some point, this
1252 is not desired, we would need to pass --git-force-create to
1253 gbp-buildpackage.
1254
1255 The files, once created, are the responsibility of the caller to
1256 remove, if necessary.
1257
1258 raises:
1259 - PristineTarNotFoundError if no suitable tarballs are found
1260 - MultiplePristineTarFoundError if multiple distinct suitable tarballs
1261 are found
1262 - CalledProcessError if gbp-buildpackage fails
1263
1264 :param pkgname str Source package name
1265 :param version str Source package upstream version
1266 :param dist str One of 'ubuntu' or 'debian'
1267 :param namespace str Namespace under which Git refs are found
1268 :rtype list(str)
1269 :returns List of tarball paths that are now present on the
1270 filesystem. They will be in the parent directory.
1271 '''
1272 dists = [dist] if dist else ['debian', 'ubuntu']
1273 for dist in dists:
1274 main_tarball = '%s_%s.orig.tar' % (pkgname, version)
1275
1276 all_tarballs = self.pristine_tar_list(dist, namespace)
1277
1278 potential_main_tarballs = [tarball for tarball
1279 in all_tarballs if tarball.startswith(main_tarball)]
1280 if len(potential_main_tarballs) == 0:
1281 continue
1282 if len(potential_main_tarballs) > 1:
1283 # This will need some extension/flag for the case of there
1284 # being multiple imports with varying compression
1285 raise MultiplePristineTarFoundError(
1286 'More than one pristine-tar tarball found for %s: %s' %
1287 (version, potential_main_tarballs)
1288 )
1289 ext = os.path.splitext(potential_main_tarballs[0])[1]
1290 tarballs = []
1291 tarballs.append(
1292 os.path.join(os.path.pardir, potential_main_tarballs[0])
1293 )
1294 args = ['buildpackage', '--git-builder=/bin/true',
1295 '--git-pristine-tar', '--git-ignore-branch',
1296 '--git-upstream-tag=%s/upstream/%s/%%(version)s%s' %
1297 (namespace, dist, ext)]
1298 # This will probably break if the component tarballs get
1299 # compressed differently, as each component tarball will show up
1300 # multiple times
1301 # Breaks may be too strong -- we will 'over cache' tarballs, and
1302 # then it's up to dpkg-buildpackage to use the 'correct' one
1303 potential_component_tarballs = {
1304 component_tarball_matches(tarball, pkgname, version).group('component') : tarball
1305 for tarball in all_tarballs
1306 if component_tarball_matches(tarball, pkgname, version)
1307 }
1308 tarballs.extend(map(lambda x : os.path.join(os.path.pardir, x),
1309 list(potential_component_tarballs.values()))
1310 )
1311 args.extend(map(lambda x : '--git-component=%s' % x,
1312 list(potential_component_tarballs.keys()))
1313 )
1314 with self.pristine_tar_branches(dist, namespace):
1315 run_gbp(args, env=self.env)
1316 return tarballs
1317
1318 raise PristineTarNotFoundError(
1319 'No pristine-tar tarball found for %s' % version
1320 )
1321
1322 def pristine_tar_exists(self, pkgname, version, namespace='pkg'):
1323 '''Report distributions that contain pristine-tar data for @version
1324
1325 raises:
1326 - MultiplePristineTarFoundError if multiple distinct suitable tarballs
1327 are found
1328
1329 :param pkgname str Source package name
1330 :param version str Source package upstream version
1331 :param namespace str Namespace under which Git refs are found
1332 :rtype list(str)
1333 :returns List of distribution names which contain a pristine-tar
1334 import for @pkgname and @version
1335 '''
1336 results = []
1337 for dist in ['debian', 'ubuntu']:
1338 main_tarball = '%s_%s.orig.tar' % (pkgname, version)
1339
1340 all_tarballs = self.pristine_tar_list(dist, namespace)
1341
1342 potential_main_tarballs = [tarball for tarball
1343 in all_tarballs if tarball.startswith(main_tarball)]
1344 if len(potential_main_tarballs) == 0:
1345 continue
1346 if len(potential_main_tarballs) > 1:
1347 # This will need some extension/flag for the case of there
1348 # being multiple imports with varying compression
1349 raise MultiplePristineTarFoundError(
1350 'More than one pristine-tar tarball found for %s: %s' %
1351 (version, potential_main_tarballs)
1352 )
1353 results.append(dist)
1354
1355 return results
1356
1357 def verify_pristine_tar(self, tarball_paths, dist, namespace='pkg'):
1358 '''Verify the pristine-tar data matches for a set of paths
1359
1360 raises:
1361 PristineTarError - if a tarball has been imported before,
1362 but the contents of the new tarball do not match
1363
1364 :param tarball_paths list(str) List of filesystem paths of orig
1365 tarballs to verify
1366 :param dist str One of 'ubuntu' or 'debian'
1367 :param namespace str Namespace under which Git refs are found
1368 :rtype bool
1369 :returns True if all paths in @tarball_paths exist in @dist's
1370 pristine-tar branch under @namespace and match the
1371 corresponding pristine-tar contents exactly
1372 '''
1373 all_tarballs = self.pristine_tar_list(dist, namespace)
1374 for path in tarball_paths:
1375 if os.path.basename(path) not in all_tarballs:
1376 break
1377 try:
1378 with self.pristine_tar_branches(dist, namespace):
1379 # need to handle this not existing
1380 run(['pristine-tar', 'verify', path])
1381 except CalledProcessError as e:
1382 raise PristineTarError(
1383 'Tarball has already been imported to %s with '
1384 'different contents' % dist
1385 )
1386 else:
1387 return True
1388
1389 return False
1390
1391 def set_git_attributes(self):
1392 git_attr_path = os.path.join(self.raw_repo.path,
1393 'info',
1394 'attributes'
1395 )
1396 try:
1397 # common-case: create an attributes file
1398 with open(git_attr_path, 'x') as f:
1399 f.write('* -ident\n')
1400 f.write('* -text\n')
1401 f.write('* -eol\n')
1402 except FileExistsError:
1403 # next-most common-case: attributes file already exists and
1404 # contains our desired value
1405 try:
1406 runq(['grep', '-q', '* -ident', git_attr_path])
1407 except CalledProcessError:
1408 # least-common case: attributes file exists, but does
1409 # not contain our desired value
1410 try:
1411 with open(git_attr_path, 'a') as f:
1412 f.write('* -ident\n')
1413 except:
1414 # failed all three cases to set our desired value in
1415 # attributes file
1416 logging.exception('Unable to set \'* -ident\' in %s' %
1417 git_attr_path
1418 )
1419 sys.exit(1)
1420 try:
1421 runq(['grep', '-q', '* -text', git_attr_path])
1422 except CalledProcessError:
1423 # least-common case: attributes file exists, but does
1424 # not contain our desired value
1425 try:
1426 with open(git_attr_path, 'a') as f:
1427 f.write('* -text\n')
1428 except:
1429 # failed all three cases to set our desired value in
1430 # attributes file
1431 logging.exception('Unable to set \'* -text\' in %s' %
1432 git_attr_path
1433 )
1434 sys.exit(1)
1435 try:
1436 runq(['grep', '-q', '* -eol', git_attr_path])
1437 except CalledProcessError:
1438 # least-common case: attributes file exists, but does
1439 # not contain our desired value
1440 try:
1441 with open(git_attr_path, 'a') as f:
1442 f.write('* -eol\n')
1443 except:
1444 # failed all three cases to set our desired value in
1445 # attributes file
1446 logging.exception('Unable to set \'* -eol\' in %s' %
1447 git_attr_path
1448 )
1449 sys.exit(1)
1450
1451 def remote_exists(self, remote_name):
1452 # https://github.com/libgit2/pygit2/issues/671
1453 return any(remote.name == remote_name for remote in self.raw_repo.remotes)
1454
1455 def _add_remote_by_fetch_url(
1456 self,
1457 remote_name,
1458 fetch_url,
1459 push_url=None,
1460 changelog_notes=False,
1461 ):
1462 """Add a remote by URL
1463
1464 If a remote with the given name doesn't exist, then create it.
1465 Otherwise, do nothing.
1466
1467 :param str remote_name: the name of the remote to create
1468 :param str fetch_url: the fetch URL for the remote
1469 :param str push_url: the push URL for the remote. If None, then a
1470 specific push URL will not be set.
1471 :param bool changelog_notes: if True, then a fetch refspec will be
1472 added to fetch changelog notes. This only makes sense for an
1473 official importer remote such as 'pkg'.
1474 :returns: None
1475 """
1476 if not self._fetch_proto:
1477 raise Exception('Cannot fetch using an object without a protocol')
1478
1479 logging.debug('Adding %s as remote %s', fetch_url, remote_name)
1480
1481 if not self.remote_exists(remote_name):
1482 self.raw_repo.remotes.create(
1483 remote_name,
1484 fetch_url,
1485 '+refs/heads/*:refs/remotes/%s/*' % remote_name,
1486 )
1487 # grab unreachable tags (orphans)
1488 self.raw_repo.remotes.add_fetch(
1489 remote_name,
1490 '+refs/tags/*:refs/tags/%s/*' % remote_name,
1491 )
1492 if changelog_notes:
1493 # The changelog notes are kept at refs/notes/commits on
1494 # Launchpad due to LP: #1871838 even though our standard place
1495 # for them is refs/notes/changelog.
1496 self.raw_repo.remotes.add_fetch(
1497 remote_name,
1498 '+refs/notes/commits:refs/notes/changelog',
1499 )
1500 if push_url:
1501 self.raw_repo.remotes.set_push_url(
1502 remote_name,
1503 push_url,
1504 )
1505 self.git_run(
1506 [
1507 'config',
1508 'remote.%s.tagOpt' % remote_name,
1509 '--no-tags',
1510 ]
1511 )
1512
1513 def _add_remote(self, remote_name, remote_url, changelog_notes=False):
1514 """Add a remote by URL location
1515
1516 URL location means the part of the URL after the proto:// prefix. The
1517 protocol to be used will be determined by what was specified by the
1518 fetch_proto at class instance construction time. Separate fetch and
1519 push URL protocols will be automatically determined.
1520
1521 If a remote with the given name doesn't exist, then create it.
1522 Otherwise, do nothing.
1523
1524 :param str remote_name: the name of the remote to create
1525 :param str remote_url: the URL for the remote but with the proto://
1526 prefix missing.
1527 :param bool changelog_notes: if True, then a fetch refspec will be
1528 added to fetch changelog notes. This only makes sense for an
1529 official importer remote such as 'pkg'.
1530 :returns: None
1531 """
1532 if not self._fetch_proto:
1533 raise Exception('Cannot fetch using an object without a protocol')
1534 if not self._lp_user:
1535 raise RuntimeError("Cannot add remote without knowing lp_user")
1536 fetch_url = '%s://%s' % (self._fetch_proto, remote_url)
1537 push_url = 'ssh://%s@%s' % (self.lp_user, remote_url)
1538
1539 self._add_remote_by_fetch_url(
1540 remote_name=remote_name,
1541 fetch_url=fetch_url,
1542 push_url=push_url,
1543 changelog_notes=changelog_notes,
1544 )
1545
1546 def add_remote(
1547 self,
1548 pkgname,
1549 repo_owner,
1550 remote_name,
1551 changelog_notes=False,
1552 ):
1553 """Add a remote to the repository configuration
1554 :param str pkgname: the name of the source package reflected by this
1555 repository.
1556 :param str repo_owner: the name of the Launchpad user or team whose
1557 repository for the package will be pointed to by this new remote.
1558 If None, the default repository for the source package will be
1559 used.
1560 :param str remote_name: the name of the remote to add.
1561 :param bool changelog_notes: if True, then a fetch refspec will be
1562 added to fetch changelog notes. This only makes sense for an
1563 official importer remote such as 'pkg'.
1564 :returns: None
1565 """
1566 if not self._fetch_proto:
1567 raise Exception('Cannot fetch using an object without a protocol')
1568 if repo_owner:
1569 remote_url = ('git.launchpad.net/~%s/ubuntu/+source/%s' %
1570 (repo_owner, pkgname))
1571 else:
1572 remote_url = ('git.launchpad.net/ubuntu/+source/%s' % pkgname)
1573
1574 self._add_remote(
1575 remote_name=remote_name,
1576 remote_url=remote_url,
1577 changelog_notes=changelog_notes,
1578 )
1579
1580 def add_remote_by_url(self, remote_name, fetch_url):
1581 if not self._fetch_proto:
1582 raise Exception('Cannot fetch using an object without a protocol')
1583
1584 self._add_remote_by_fetch_url(remote_name, fetch_url)
1585
1586 def add_base_remotes(self, pkgname, repo_owner=None):
1587 """Add the 'pkg' base remote to the repository configuration
1588
1589 :param str pkgname: the name of the source package reflected by this
1590 repository.
1591 :param str repo_owner: the name of the Launchpad user or team whose
1592 repository for the package will be pointed to by this new remote.
1593 If None, the default repository for the source package will be
1594 used.
1595 :returns: None
1596 """
1597 self.add_remote(pkgname, repo_owner, 'pkg', changelog_notes=True)
1598
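An illustrative sketch (not part of the proposed change) of the remote configuration that results; the directory, package name and Launchpad username are hypothetical, and adding the remote itself needs no network access.

from gitubuntu.git_repository import GitUbuntuRepository

repo = GitUbuntuRepository(
    local_dir='hello-work',   # hypothetical working directory
    lp_user='jdoe',           # hypothetical Launchpad username
    fetch_proto='https',
)
repo.add_base_remotes('hello')
pkg = repo.raw_repo.remotes['pkg']
assert pkg.url == 'https://git.launchpad.net/ubuntu/+source/hello'
assert pkg.push_url == 'ssh://jdoe@git.launchpad.net/ubuntu/+source/hello'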
1599 def add_lpuser_remote(self, pkgname):
1600 if not self._fetch_proto:
1601 raise Exception('Cannot add a remote using an object without a protocol')
1602 if not self._lp_user:
1603 raise RuntimeError("Cannot add remote without knowing lp_user")
1604 remote_url = ('git.launchpad.net/~%s/ubuntu/+source/%s' %
1605 (self.lp_user, pkgname))
1606
1607 self._add_remote(remote_name=self.lp_user, remote_url=remote_url)
1608 # XXX: want a remote alias of 'lpme' -> self.lp_user
1609 # self.git_run(['config', 'url.%s.insteadof' % self.lp_user, 'lpme'])
1610
1611 def fetch_remote(self, remote_name, verbose=False):
1612 # Does not seem to be working with https
1613 # https://github.com/libgit2/pygit2/issues/573
1614 # https://github.com/libgit2/libgit2/issues/3786
1615 # self.raw_repo.remotes[remote_name].fetch()
1616 kwargs = {}
1617 kwargs['verbose_on_failure'] = True
1618 if verbose:
1619 # If we are redirecting stdout/stderr to the console, we
1620 # do not need to have run() also emit it
1621 kwargs['verbose_on_failure'] = False
1622 kwargs['stdout'] = None
1623 kwargs['stderr'] = None
1624 try:
1625 logging.debug("Fetching remote %s", remote_name)
1626 self.git_run(
1627 args=['fetch', remote_name],
1628 env={'GIT_TERMINAL_PROMPT': '0',},
1629 **kwargs
1630 )
1631 except CalledProcessError:
1632 raise GitUbuntuRepositoryFetchError(
1633 "Unable to fetch remote %s" % remote_name
1634 )
1635
1636 def fetch_base_remotes(self, verbose=False):
1637 self.fetch_remote(remote_name='pkg', verbose=verbose)
1638
1639 def fetch_remote_refspecs(self, remote_name, refspecs, verbose=False):
1640 # Does not seem to be working with https
1641 # https://github.com/libgit2/pygit2/issues/573
1642 # https://github.com/libgit2/libgit2/issues/3786
1643 # self.raw_repo.remotes[remote_name].fetch()
1644 for refspec in refspecs:
1645 kwargs = {}
1646 kwargs['verbose_on_failure'] = True
1647 if verbose:
1648 # If we are redirecting stdout/stderr to the console, we
1649 # do not need to have run() also emit it
1650 kwargs['verbose_on_failure'] = False
1651 kwargs['stdout'] = None
1652 kwargs['stderr'] = None
1653 try:
1654 logging.debug(
1655 "Fetching refspec %s from remote %s",
1656 refspec,
1657 remote_name,
1658 )
1659 self.git_run(
1660 args=['fetch', remote_name, refspec],
1661 env={'GIT_TERMINAL_PROMPT': '0',},
1662 **kwargs,
1663 )
1664 except CalledProcessError:
1665 raise GitUbuntuRepositoryFetchError(
1666 "Unable to fetch %s from remote %s" % (
1667 refspecs,
1668 remote_name,
1669 )
1670 )
1671
1672 def fetch_lpuser_remote(self, verbose=False):
1673 if not self._fetch_proto:
1674 raise Exception('Cannot fetch using an object without a protocol')
1675 if not self._lp_user:
1676 raise RuntimeError("Cannot fetch without knowing lp_user")
1677 self.fetch_remote(remote_name=self.lp_user, verbose=verbose)
1678
1679 def copy_base_references(self, namespace):
1680 for ref in self.references:
1681 for (target_refs, source_refs) in [
1682 ('refs/heads/%s/' % namespace, 'refs/remotes/pkg/'),]:
1683 if ref.name.startswith(source_refs):
1684 self.raw_repo.create_reference(
1685                        '%s%s' % (target_refs, ref.name[len(source_refs):]),  # target_refs already ends in '/'
1686 ref.peel().id)
1687
1688 def delete_branches_in_namespace(self, namespace):
1689 _local_branches = copy(self.local_branches)
1690        for head in _local_branches:  # iterate over the snapshot taken above
1691 if head.branch_name.startswith(namespace):
1692 head.delete()
1693
1694 def delete_tags_in_namespace(self, namespace):
1695 _tags = copy(self.tags)
1696        for ref in _tags:  # iterate over the snapshot taken above
1697 if ref.name.startswith('refs/tags/%s' % namespace):
1698 ref.delete()
1699
1700 @property
1701 def env(self):
1702 # Return a copy of the cached _derive_env method result so that the
1703 # caller cannot inadvertently modify our cached answer. Unfortunately
1704 # this leaks the lru_cache-ness of the _derive_env method to this
1705 # property getter, but this seems better than nothing.
1706 return dict(self._derive_env())
1707
1708 @lru_cache()
1709 def _derive_env(self):
1710 """Determine what the git CLI environment should be
1711
1712 This depends on the initial environment saved from the constructor and
1713 the paths associated with self.raw_repo, neither of which should change
1714 in the lifetime of this class instance.
1715 """
1716 return _derive_git_cli_env(
1717 self.raw_repo,
1718 initial_env=self._initial_env
1719 )
1720
1721 @property
1722 def local_dir(self):
1723 """Base directory of this git repository (contains .git/)"""
1724 return self._local_dir
1725
1726 @property
1727 def git_dir(self):
1728        """Path to the git directory (GIT_DIR), as used in the cached environment"""
1729 return self.raw_repo.path
1730
1731 def _references(self, prefix=''):
1732 return [self.raw_repo.lookup_reference(r) for r in
1733 self.raw_repo.listall_references() if
1734 r.startswith(prefix)]
1735
1736 def references_with_prefix(self, prefix):
1737 return self._references(prefix)
1738
1739 @property
1740 def references(self):
1741 return self._references()
1742
1743 @property
1744 def tags(self):
1745 return self._references('refs/tags')
1746
1747 def _branches(self,
1748 branch_type=pygit2.GIT_BRANCH_LOCAL | pygit2.GIT_BRANCH_REMOTE):
1749 branches = []
1750 if branch_type & pygit2.GIT_BRANCH_LOCAL:
1751 branches.extend([self.raw_repo.lookup_branch(b) for b in
1752 self.raw_repo.listall_branches(pygit2.GIT_BRANCH_LOCAL)])
1753 if branch_type & pygit2.GIT_BRANCH_REMOTE:
1754 branches.extend([self.raw_repo.lookup_branch(b, pygit2.GIT_BRANCH_REMOTE) for b in
1755 self.raw_repo.listall_branches(pygit2.GIT_BRANCH_REMOTE)])
1756 return branches
1757
1758 @property
1759 def branches(self):
1760 return self._branches()
1761
1762 @property
1763 def branch_names(self):
1764 return [b.branch_name for b in self.branches]
1765
1766 @property
1767 def local_branches(self):
1768 return self._branches(pygit2.GIT_BRANCH_LOCAL)
1769
1770 @property
1771 def local_branch_names(self):
1772 return [b.branch_name for b in self.local_branches]
1773
1774 @property
1775 def remote_branches(self):
1776 return self._branches(pygit2.GIT_BRANCH_REMOTE)
1777
1778 @property
1779 def remote_branch_names(self):
1780 return [b.branch_name for b in self.remote_branches]
1781
1782 @property
1783 def lp_user(self):
1784 if not self._lp_user:
1785 raise RuntimeError("lp_user is not set")
1786 return self._lp_user
1787
1788 def get_commitish(self, commitish):
1789 return self.raw_repo.revparse_single(commitish)
1790
1791 def head_to_commit(self, head_name):
1792 return str(self.get_head_by_name(head_name).peel().id)
1793
1794 def get_short_hash(self, hash):
1795 """Return an unambiguous but abbreviated form of a commit hash
1796
1797 Note that the hash may still become ambiguous in the future.
1798 """
1799 stdout, _ = self.git_run(['rev-parse', '--short', hash])
1800 return stdout.strip()
1801
1802 def git_run(self, args, env=None, **kwargs):
1803 """Run the git CLI with the provided arguments
1804
1805 :param list(str) args: arguments to the git CLI
1806 :param dict env: additional environment variables to use
1807 :param dict **kwargs: further arguments to pass through to
1808 gitubuntu.run.run()
1809 :raises subprocess.CalledProcessError: if git exits non-zero
1810 :rtype: (str, str)
1811 :returns: stdout and stderr strings containing the subprocess output
1812
1813 The environment used is based on the Python process' environment at the
1814 time this class instance was constructed.
1815
1816 The GIT_DIR and GIT_WORK_TREE environment variables are set
1817 automatically based on the repository's existing location and settings.
1818
1819 If env is set, then the environment to be used is updated with env
1820 before the call to git is made. This can override GIT_DIR,
1821 GIT_WORK_TREE, and anything else.
1822 """
1823 return git_run(
1824 pygit2_repo=self.raw_repo,
1825 args=args,
1826 initial_env=self._initial_env,
1827 update_env=env,
1828 **kwargs,
1829 )
1830
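    # A usage sketch for git_run() above, assuming `repo` is an existing
    # GitUbuntuRepository instance; the extra environment variable mirrors how
    # fetch_remote() above uses it:
    #
    #     stdout, _ = repo.git_run(['rev-parse', 'HEAD'])
    #     head_hash = stdout.strip()
    #     repo.git_run(['fetch', 'pkg'], env={'GIT_TERMINAL_PROMPT': '0'})
    #
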
1831 def garbage_collect(self):
1832 self.git_run(['gc'])
1833
1834 def extract_file_from_treeish(self, treeish_string, filename):
1835        """Extract a file from @treeish_string to a local file
1836
1837        Arguments:
1838        treeish_string - a string Git treeish
1839        filename - file to extract from @treeish_string
1840
1841 Returns a NamedTemporaryFile that is flushed but not rewound.
1842 """
1843 blob = follow_symlinks_to_blob(
1844 self.raw_repo,
1845 treeish_object=self.raw_repo.revparse_single(treeish_string),
1846 path=filename,
1847 )
1848 outfile = tempfile.NamedTemporaryFile()
1849 outfile.write(blob.data)
1850 outfile.flush()
1851 return outfile
1852
1853 @lru_cache()
1854 def get_changelog_from_treeish(self, treeish_string):
1855 return Changelog.from_treeish(
1856 self.raw_repo,
1857 self.raw_repo.revparse_single(treeish_string),
1858 )
1859
1860 def get_changelog_versions_from_treeish(self, treeish_string):
1861 """Extract current and prior versions from debian/changelog in a
1862 given @treeish_string
1863
1864 Returns (None, None) if the treeish supplied is None or if
1865 'debian/changelog' does not exist in the treeish.
1866
1867 Returns (current, previous) on success.
1868 """
1869 try:
1870 changelog = self.get_changelog_from_treeish(treeish_string)
1871 except KeyError:
1872 # If 'debian/changelog' does
1873 # not exist, then (None, None) is returned. KeyError propagates up
1874 # from Changelog's __init__.
1875 return None, None
1876 try:
1877 return changelog.version, changelog.previous_version
1878 except CalledProcessError:
1879 raise GitUbuntuChangelogError(
1880 'Cannot get changelog versions'
1881 )
1882
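    # Sketch: reading the current and previous changelog versions from a
    # branch (the ref name is illustrative; `repo` is an assumed
    # GitUbuntuRepository instance):
    #
    #     current, previous = repo.get_changelog_versions_from_treeish(
    #         'pkg/ubuntu/devel',
    #     )
    #     # (None, None) means debian/changelog was absent in that treeish
    #
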
1883 def get_changelog_distribution_from_treeish(self, treeish_string):
1884        """Extract targeted distribution from debian/changelog in a
1885 given treeish
1886 """
1887
1888 if treeish_string is None:
1889 return None
1890
1891 try:
1892 return self.get_changelog_from_treeish(treeish_string).distribution
1893 except (KeyError, CalledProcessError):
1894 raise GitUbuntuChangelogError(
1895 'Cannot get changelog distribution'
1896 )
1897
1898 def get_changelog_srcpkg_from_treeish(self, treeish_string):
1899 """Extract srcpkg from debian/changelog in a given treeish
1900 """
1901
1902 if treeish_string is None:
1903 return None
1904
1905 try:
1906 return self.get_changelog_from_treeish(treeish_string).srcpkg
1907 except (KeyError, CalledProcessError):
1908 raise GitUbuntuChangelogError(
1909 'Cannot get changelog source package name'
1910 )
1911
1912 def get_head_info(self, head_prefix, namespace):
1913 """Extract package versions at branch heads
1914
1915 Extract the version from debian/changelog of all
1916        f'{namespace}/{head_prefix}/*' branches, excluding any branch that
1917 contains 'ubuntu/devel'.
1918
1919 :param str namespace: the namespace under which git refs are found
1920 :param str head_prefix: the prefix to look for
1921 :rtype: dict(str, HeadInfoItem)
1922        :returns: a dictionary keyed by the namespaced branch name (i.e. without
1923            a 'refs/heads/' prefix but with the namespace prefix, e.g.
1924 'importer/ubuntu/focal-devel').
1925 """
1926 head_info = dict()
1927 for head in self.local_branches:
1928 prefix = '%s/%s' % (namespace, head_prefix)
1929 if not head.branch_name.startswith(prefix):
1930 continue
1931 if 'ubuntu/devel' in head.branch_name:
1932 continue
1933 version, _ = (
1934 self.get_changelog_versions_from_treeish(str(head.peel().id))
1935 )
1936 head_info[head.branch_name] = HeadInfoItem(
1937 version=version,
1938 commit_time=head.peel().commit_time,
1939 commit_id=head.peel().id,
1940 )
1941
1942 return head_info
1943
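    # Sketch of how get_head_info() above might be consumed, assuming `repo`
    # is a GitUbuntuRepository with imported branches under the 'importer'
    # namespace (names are illustrative):
    #
    #     info = repo.get_head_info(head_prefix='ubuntu', namespace='importer')
    #     for branch_name, item in sorted(info.items()):
    #         print(branch_name, item.version, item.commit_id)
    #
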
1944 def treeishs_identical(self, treeish_string1, treeish_string2):
1945 if treeish_string1 is None or treeish_string2 is None:
1946 return False
1947 _tree_obj1 = self.raw_repo.revparse_single(treeish_string1)
1948 _tree_id1 = _tree_obj1.peel(pygit2.Tree).id
1949 _tree_obj2 = self.raw_repo.revparse_single(treeish_string2)
1950 _tree_id2 = _tree_obj2.peel(pygit2.Tree).id
1951 return _tree_id1 == _tree_id2
1952
1953 def get_head_by_name(self, name):
1954 try:
1955 return self.raw_repo.lookup_branch(name)
1956 except TypeError:
1957 return None
1958
1959 def get_tag_reference(self, tag):
1960 """Return the tag object if it exists in the repository"""
1961 try:
1962 return self.raw_repo.lookup_reference('refs/tags/%s' % tag)
1963 except (KeyError, ValueError):
1964 return None
1965
1966 def get_import_tag(
1967 self,
1968 version,
1969 namespace,
1970 patch_state=PatchState.UNAPPLIED,
1971 ):
1972 """
1973 Return the import tag matching the given specification.
1974
1975 :param str version: the package version string to match
1976 :param str namespace: the namespace under which git refs are found
1977 :param PatchState patch_state: whether to look for unapplied or applied
1978 tags
1979 :returns: the matching import tag, or None if there is no match
1980 :rtype: pygit2.Reference or None
1981 """
1982 return self.get_tag_reference(
1983 import_tag(version, namespace, patch_state)
1984 )
1985
1986 def get_reimport_tag(
1987 self,
1988 version,
1989 namespace,
1990 reimport,
1991 patch_state=PatchState.UNAPPLIED,
1992 ):
1993 """
1994 Return the reimport tag matching the given specification.
1995
1996 :param str version: the package version string to match
1997 :param str namespace: the namespace under which git refs are found
1998 :param int reimport: the sequence number of the reimport tag
1999 :param PatchState patch_state: whether to look for unapplied or applied
2000 tags
2001 :returns: the matching reimport tag, or None if there is no match
2002 :rtype: pygit2.Reference or None
2003 """
2004 return self.get_tag_reference(
2005 reimport_tag(version, namespace, reimport, patch_state)
2006 )
2007
2008 def get_all_reimport_tags(
2009 self,
2010 version,
2011 namespace,
2012 patch_state=PatchState.UNAPPLIED,
2013 ):
2014 """
2015 Return all reimport tags matching the given specification.
2016
2017 :param str version: the package version string to match
2018 :param str namespace: the namespace under which git refs are found
2019 :param PatchState patch_state: whether to look for unapplied or applied
2020 tags
2021 :returns: matching reimport tags
2022 :rtype: sequence(pygit2.Reference)
2023 """
2024 return self.references_with_prefix(
2025 'refs/tags/%s/' % reimport_tag_prefix(
2026 version,
2027 namespace,
2028 patch_state,
2029 )
2030 )
2031
2032 def get_upload_tag(self, version, namespace):
2033 """
2034 Return the upload tag matching the given specification.
2035
2036 :param str version: the package version string to match
2037 :param str namespace: the namespace under which git refs are found
2038 :returns: the matching upload tag, or None if there is no match
2039 :rtype: pygit2.Reference or None
2040 """
2041 return self.get_tag_reference(upload_tag(version, namespace))
2042
2043 def get_upstream_tag(self, version, namespace):
2044 """
2045 Return the upstream tag matching the given specification.
2046
2047 :param str version: the package version string to match
2048 :param str namespace: the namespace under which git refs are found
2049 :returns: the matching upstream tag, or None if there is no match
2050 :rtype: pygit2.Reference or None
2051 """
2052 return self.get_tag_reference(upstream_tag(version, namespace))
2053
2054 def get_orphan_tag(self, version, namespace):
2055 """
2056 Return the orphan tag matching the given specification.
2057
2058 :param str version: the package version string to match
2059 :param str namespace: the namespace under which git refs are found
2060 :returns: the matching orphan tag, or None if there is no match
2061 :rtype: pygit2.Reference or None
2062 """
2063 return self.get_tag_reference(orphan_tag(version, namespace))
2064
2065 def create_tag(self,
2066 commit_hash,
2067 tag_name,
2068 tag_msg,
2069 tagger=None,
2070 ):
2071 """Create a tag in the repository
2072
2073 :param str commit_hash: the commit hash the tag will point to.
2074 :param str tag_name: the name of the tag to be created.
2075 :param str tag_msg: the text of the tag annotation.
2076 :param pygit2.Signature tagger: if supplied, use this signature in the
2077 created tag's "tagger" metadata. If not supplied, an arbitrary name
2078            and email address are used with the current time.
2079 :returns: None
2080 """
2081 if not tagger:
2082 tagger_time, tagger_offset = datetime_to_signature_spec(
2083 datetime.datetime.now(),
2084 )
2085 tagger = pygit2.Signature(
2086 gitubuntu.spec.SYNTHESIZED_COMMITTER_NAME,
2087 gitubuntu.spec.SYNTHESIZED_COMMITTER_EMAIL,
2088 tagger_time,
2089 tagger_offset,
2090 )
2091
2092 logging.debug("Creating tag %s pointing to %s", tag_name, commit_hash)
2093 self.raw_repo.create_tag(
2094 tag_name,
2095 pygit2.Oid(hex=commit_hash),
2096 pygit2.GIT_OBJ_COMMIT,
2097 tagger,
2098 tag_msg,
2099 )
2100
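    # Sketch for create_tag() above; the ref, tag name and message are
    # placeholders and `repo` is an assumed GitUbuntuRepository instance:
    #
    #     commit = str(repo.get_commitish('pkg/ubuntu/devel').peel(pygit2.Commit).id)
    #     repo.create_tag(commit, 'importer/ubuntu/1.0-1ubuntu1', 'Example annotation')
    #
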
2101 def nearest_remote_branches(self, commit_hash, prefix=None,
2102 max_commits=100
2103 ):
2104 '''Return the set of remote branches nearest to @commit_hash
2105
2106 This is a set of remote branch objects that are currently
2107 pointing at a commit, where that commit is the nearest ancestor
2108 to @commit_hash among the possible commits.
2109
2110 If no such commit is found, an empty set is returned.
2111
2112 Only consider remote branches that start with @prefix.
2113
2114        Stop searching beyond the @max_commits-th ancestor. This method is
2115        typically used as a heuristic that rarely needs to look far back in
2116        history, and the limit avoids walking all the way back to the root
2117        commit, which may be a long way off.
2118 '''
2119
2120 # 1) cache all prefixed branch names by commit
2121 remote_heads_by_commit = collections.defaultdict(set)
2122 for b in self.remote_branches:
2123 if prefix is None or b.branch_name.startswith(prefix):
2124 remote_heads_by_commit[b.peel().id].add(b)
2125
2126 # 2) walk from commit_hash backwards until a cached commit is found
2127 commits = self.raw_repo.walk(
2128 self.get_commitish(commit_hash).id,
2129 pygit2.GIT_SORT_TOPOLOGICAL,
2130 )
2131 for commit in itertools.islice(commits, max_commits):
2132 if commit.id not in remote_heads_by_commit:
2133 continue # avoid creating a bunch of empty sets
2134
2135 if remote_heads_by_commit[commit.id]:
2136 return remote_heads_by_commit[commit.id]
2137
2138            # In the currently impossible (but permitted by the data
2139            # structure) case that the dictionary returned an empty set, we
2140            # loop around again, which is what we want.
2141
2142 return set()
2143
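    # Sketch for nearest_remote_branches() above: find which 'pkg/' branches
    # point at the closest ancestor of a commit (`repo` and the commitish are
    # assumptions for illustration):
    #
    #     branches = repo.nearest_remote_branches('HEAD', prefix='pkg/')
    #     names = sorted(b.branch_name for b in branches)
    #
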
2144
2145 def nearest_tag(
2146 self,
2147 commitish_string,
2148 prefix,
2149 max_commits=100,
2150 ):
2151 # 1) cache all patterned tag names by commit
2152 pattern_tags_by_commit = collections.defaultdict(set)
2153 for t in self.tags:
2154 if t.name.startswith('refs/tags/' + prefix):
2155 pattern_tags_by_commit[t.peel(pygit2.Commit).id].add(t)
2156
2157 commits = self.raw_repo.walk(
2158 self.get_commitish(commitish_string).id,
2159 pygit2.GIT_SORT_TOPOLOGICAL,
2160 )
2161 for commit in itertools.islice(commits, max_commits):
2162 if commit.id not in pattern_tags_by_commit:
2163 continue
2164
2165 return pattern_tags_by_commit[commit.id].pop()
2166
2167 return None
2168
2169 @staticmethod
2170 def tag_to_pretty_name(tag):
2171 _, _, pretty_name = tag.name.partition('refs/tags/')
2172 return pretty_name
2173
2174 def create_tracking_branch(self, branch_name, upstream_name, force=False):
2175 return self.raw_repo.create_branch(
2176 branch_name,
2177 self.raw_repo.lookup_branch(
2178 upstream_name,
2179 pygit2.GIT_BRANCH_REMOTE
2180 ).peel(pygit2.Commit),
2181 force
2182 )
2183
2184 def checkout_commitish(self, commitish):
2185 # pygit2 checkout does not accept hashes
2186 # https://github.com/libgit2/pygit2/issues/412
2187 # self.raw_repo.checkout_tree(self.get_commitish(commitish))
2188 self.git_run(['checkout', commitish])
2189
2190 def reset_commitish(self, commitish):
2191 # pygit2 checkout does not accept hashes
2192 # https://github.com/libgit2/pygit2/issues/412
2193 # self.checkout_tree(self.get_commitish(commitish))
2194 self.git_run(['reset', '--hard', commitish])
2195
2196 def update_head_to_commit(self, head_name, commit_hash):
2197 try:
2198 self.raw_repo.lookup_branch(head_name).set_target(commit_hash)
2199 except AttributeError:
2200 self.raw_repo.create_branch(head_name,
2201 self.raw_repo.get(commit_hash)
2202 )
2203
2204 def clean_repository_state(self):
2205 """Cleanup working tree"""
2206 runq(['git', 'checkout', '--orphan', 'master'],
2207 check=False, env=self.env)
2208 runq(['git', 'reset', '--hard'], env=self.env)
2209 runq(['git', 'clean', '-f', '-d'], env=self.env)
2210
2211 def get_all_changelog_versions_from_treeish(self, treeish):
2212 changelog = self.get_changelog_from_treeish(treeish)
2213 return changelog.all_versions
2214
2215 def annotated_tag(self, tag_name, commitish, force, msg=None):
2216 try:
2217 args = ['tag', '-a', tag_name, commitish]
2218 if force:
2219 args += ['-f']
2220 if msg is not None:
2221 args += ['-m', msg]
2222 self.git_run(args, stdin=None, stdout=None, stderr=None)
2223 version, _ = self.get_changelog_versions_from_treeish(commitish)
2224 logging.info('Created annotated tag %s for version %s' % (tag_name, version))
2225 except:
2226 logging.error('Unable to tag %s. Does it already exist (pass -f)?' %
2227 tag_name
2228 )
2229 raise
2230
2231 def tag(self, tag_name, commitish, force):
2232 try:
2233 args = ['tag', tag_name, commitish]
2234 if force:
2235 args += ['-f']
2236 self.git_run(args)
2237 version, _ = self.get_changelog_versions_from_treeish(commitish)
2238 logging.info('Created tag %s for version %s' % (tag_name, version))
2239 except:
2240 logging.error('Unable to tag %s. Does it already exist (pass -f)?' %
2241 tag_name
2242 )
2243 raise
2244
2245 def commit_source_tree(
2246 self,
2247 tree,
2248 parents,
2249 log_message,
2250 commit_date=None,
2251 author_date=None,
2252 ):
2253 """Commit a git tree with appropriate parents and message
2254
2255 Given a git tree that contains a source package, create a matching
2256 commit using metadata derived from the tree as required according to
2257 the import specification.
2258
2259 Commit metadata elements that are not specified as derived from the
2260 tree itself are required as parameters.
2261
2262 :param pygit2.Oid tree: reference to the git tree in this repository
2263 that contains a debian/changelog file
2264 :param list(pygit2.Oid) parents: parent commits of the commit to be
2265 created
2266 :param bytes log_message: commit message
2267 :param datetime.datetime commit_date: the commit date to use (any
2268 sub-second part of the timestamp is truncated). If None, use the
2269 current date.
2270 :param datetime.datetime author_date: overrides the author date
2271 normally parsed from the changelog entry (i.e. for handling date
2272 parsing edge cases). Any sub-second part of the timestamp is
2273 truncated.
2274 :returns: reference to the created commit
2275 :rtype: pygit2.Oid
2276 """
2277 if commit_date is None:
2278 commit_date = datetime.datetime.now()
2279
2280 commit_time, commit_offset = datetime_to_signature_spec(commit_date)
2281 changelog = self.get_changelog_from_treeish(str(tree))
2282
2283 return self.raw_repo.create_commit(
2284 None, # ref: do not update any ref
2285 pygit2.Signature(*changelog.git_authorship(author_date)), # author
2286 pygit2.Signature( # committer
2287 name=gitubuntu.spec.SYNTHESIZED_COMMITTER_NAME,
2288 email=gitubuntu.spec.SYNTHESIZED_COMMITTER_EMAIL,
2289 time=commit_time,
2290 offset=commit_offset,
2291 ),
2292 log_message, # message
2293 tree, # tree
2294 parents, # parents
2295 )
2296
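    # Sketch for commit_source_tree() above, assuming `repo` is a
    # GitUbuntuRepository and tree_oid/parent_oid are pygit2.Oid values
    # obtained elsewhere (e.g. from dir_to_tree() and an existing commit):
    #
    #     commit_oid = repo.commit_source_tree(
    #         tree=tree_oid,
    #         parents=[parent_oid],
    #         log_message='Import patches-unapplied version 1.0-1',
    #     )
    #
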
2297
2298 @classmethod
2299 def _create_replacement_tree_builder(cls, repo, treeish, sub_path):
2300 '''Create a replacement TreeBuilder
2301
2302 Create a TreeBuilder based on an existing repository, top-level
2303 tree-ish and path inside that tree.
2304
2305 A sub_path of '' is taken to mean a request for a replacement
2306 TreeBuilder for the top level tree.
2307
2308 Returns a TreeBuilder object pre-populated with the previous contents.
2309 If the path did not previously exist in the tree-ish, then return an
2310 empty TreeBuilder instead.
2311 '''
2312
2313 tree = treeish.peel(pygit2.GIT_OBJ_TREE)
2314
2315 # Short path: sub_path == '' means want root
2316 if not sub_path:
2317 return repo.TreeBuilder(tree)
2318
2319 try:
2320 tree_entry = tree[sub_path]
2321 except KeyError:
2322 # sub_path does not exist in tree, so return an empty TreeBuilder
2323 tree_builder = repo.TreeBuilder()
2324 else:
2325 # The tree entry must itself be a tree
2326 assert tree_entry.filemode == pygit2.GIT_FILEMODE_TREE
2327 sub_tree = repo.get(tree_entry.id).peel(pygit2.GIT_OBJ_TREE)
2328 tree_builder = repo.TreeBuilder(sub_tree)
2329
2330 return tree_builder
2331
2332 @classmethod
2333 def _add_missing_tree_dirs(cls, repo, top_path, top_tree_object, _sub_path=''):
2334 """
2335 Recursively add empty directories to a tree object
2336
2337 Find empty directories under top_path and make sure that empty tree
2338 objects exist for them. If this means that the tree object must change,
2339 then a replacement tree object is created accordingly.
2340
2341 repo: pygit2.Repository object
2342 top_path: path to the extracted contents of the tree
2343 top_tree_object: tree object
2344 _sub_path (internal): relative path for where we are for recursive call
2345
2346        Returns None if the tree oid is unchanged, or the replacement oid if it changed.
2347 """
2348
2349 # full path to our _sub_path, including top_path
2350 full_path = os.path.join(top_path, _sub_path)
2351
2352 dir_list = os.listdir(full_path)
2353 if not dir_list:
2354 # directory is empty, so this is always the empty tree object
2355 return repo.TreeBuilder().write()
2356
2357 # tree_builder is None if we don't need one yet, or is the replacement
2358 # for the tree object for this recursive call
2359 tree_builder = None
2360 for entry in dir_list:
2361 entry_path = os.path.join(full_path, entry)
2362 # We cannot use os.path.isdir() here as we don't want to recurse
2363 # down symlinks to directories.
2364 if stat.S_ISDIR(os.lstat(entry_path).st_mode):
2365 # this is a directory, so recurse down
2366 entry_oid = cls._add_missing_tree_dirs(
2367 repo=repo,
2368 top_path=top_path,
2369 top_tree_object=top_tree_object,
2370 _sub_path=os.path.join(_sub_path, entry),
2371 )
2372 if entry_oid:
2373 # The recursive call reported a change to the tree, so we
2374                    # must adopt it in what we return to propagate the change
2375 # upwards.
2376 if tree_builder is None:
2377 # There is no replacement in progress for this
2378 # recursive call's tree object, so start one.
2379 tree_builder = cls._create_replacement_tree_builder(
2380 repo=repo,
2381 treeish=top_tree_object,
2382 sub_path=_sub_path,
2383 )
2384                    # If the entry previously existed, remove it.
2385 if tree_builder.get(entry):
2386 tree_builder.remove(entry)
2387 # Add the replacement tree entry
2388 tree_builder.insert( # (takes no kwargs)
2389 entry, # name
2390 entry_oid, # oid
2391 pygit2.GIT_FILEMODE_TREE, # attr
2392 )
2393
2394 if tree_builder is None:
2395 return None # no changes
2396 else:
2397 return tree_builder.write() # create replacement tree object
2398
2399 @classmethod
2400 def dir_to_tree(cls, pygit2_repo, path, escape=False):
2401 """Create a git tree object from the given filesystem path
2402
2403 :param pygit2.Repository pygit2_repo: the repository on which to
2404 operate. If you have a GitUbuntuRepository instance, you can use
2405 its raw_repo property.
2406 :param path: path to filesystem directory to be the root of the tree
2407 :param escape: if True, escape using escape_dot_git() first. This
2408 mutates the provided filesystem tree.
2409
2410 escape should be used when the directory being moved into git is
2411 directly from a source package, since the source package may contain
2412 files or directories named '.git' and these cannot otherwise be
2413 represented in a git tree object.
2414
2415 escape should not be used if the directory has already been escaped
2416 previously. For example: if escape was previously used to move into a
2417 git tree object, and that git tree object has been extracted to a
2418 working directory for manipulation without unescaping, then escape
2419 should not be used again to move that result back into a git tree
2420 object.
2421 """
2422 if escape:
2423 escape_dot_git(path)
2424 # git expects the index file to not exist (in order to create a fresh
2425 # one), so create a temporary directory to put it in so we have a name
2426 # we can use safely.
2427 with tempfile.TemporaryDirectory() as index_dir:
2428 index_path = os.path.join(index_dir, 'index')
2429 def indexed_git_run(*args):
2430 return git_run(
2431 pygit2_repo=pygit2_repo,
2432 args=args,
2433 work_tree_path=path,
2434 index_path=index_path,
2435 )
2436 indexed_git_run('add', '-f', '-A')
2437 indexed_git_run('reset', 'HEAD', '--', '.git')
2438 indexed_git_run('reset', 'HEAD', '--', '.pc')
2439 tree_hash_str, _ = indexed_git_run('write-tree')
2440 tree_hash_str = tree_hash_str.strip()
2441 tree = pygit2_repo.get(tree_hash_str)
2442
2443 # Add any empty directories that git did not import. Workaround for LP:
2444 # #1687057.
2445 replacement_oid = cls._add_missing_tree_dirs(
2446 repo=pygit2_repo,
2447 top_path=path,
2448 top_tree_object=tree,
2449 )
2450 if replacement_oid:
2451 # Empty directories had to be added
2452 return str(replacement_oid) # return the replacement instead
2453 else:
2454 # No empty directories were added
2455 return tree_hash_str # no replacement was needed
2456
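    # Sketch for dir_to_tree() above: turn an unpacked source package
    # directory into a git tree object, escaping any '.git' entries first
    # (`repo` and the path are assumptions for illustration):
    #
    #     tree_hash = repo.dir_to_tree(
    #         repo.raw_repo,
    #         '/tmp/unpacked-source',
    #         escape=True,
    #     )
    #
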
2457 @contextmanager
2458 def temporary_worktree(self, commitish, prefix=None):
2459 with tempfile.TemporaryDirectory(prefix=prefix) as tempdir:
2460 self.git_run(
2461 [
2462 'worktree',
2463 'add',
2464 '--detach',
2465 '--force',
2466 tempdir,
2467 commitish,
2468 ]
2469 )
2470
2471 oldcwd = os.getcwd()
2472 os.chdir(tempdir)
2473
2474 try:
2475 yield
2476 except:
2477 raise
2478 finally:
2479 os.chdir(oldcwd)
2480
2481 self.git_run(['worktree', 'prune'])
2482
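    # Sketch for temporary_worktree() above: run a command inside a detached,
    # throwaway checkout of a commitish. The ref name is illustrative, `repo`
    # is an assumed GitUbuntuRepository, and run() is this module's helper:
    #
    #     with repo.temporary_worktree('pkg/ubuntu/devel'):
    #         # the current working directory is the worktree until exit
    #         run(['ls', 'debian'])
    #
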
2483 def tree_hash_after_command(self, commitish, cmd):
2484 with self.temporary_worktree(commitish):
2485 try:
2486 run(cmd)
2487 except CalledProcessError as e:
2488 logging.error("Unable to execute `%s`", ' '.join(cmd))
2489 raise
2490
2491 run(["git", "add", "-f", ".",])
2492 tree_hash, _ = run(["git", "write-tree"])
2493 return tree_hash.strip()
2494
2495 def tree_hash_subpath(self, treeish_string, path):
2496 """Get the tree hash for path at a given treeish
2497
2498 Arguments:
2499 @treeish_string: a string Git treeish
2500 @path: a string path present in @treeish_string
2501
2502 Returns:
2503 String hash of Git tree corresponding to @path in @treeish_string
2504 """
2505 tree_obj = self.raw_repo.revparse_single(treeish_string).peel(
2506 pygit2.Tree
2507 )
2508 return str(tree_obj[path].id)
2509
2510 def paths_are_identical(self, treeish1_string, treeish2_string, path):
2511 """Determine if a given path is the same in two treeishs
2512
2513 Arguments:
2514 @treeish1_string: a string Git treeish
2515 @treeish2_string: a string Git treeish
2516 @path: a string path present in @treeish1_string and @treeish2_string
2517
2518 Returns:
2519 True, if @path is the same in @treeish1_string and @treeish2_string
2520 False, otherwise
2521 """
2522 try:
2523 subpath_tree_hash1 = self.tree_hash_subpath(
2524 treeish1_string,
2525 path,
2526 )
2527 except KeyError:
2528 # if the path does not exist in treeish
2529 subpath_tree_hash1 = None
2530 try:
2531 subpath_tree_hash2 = self.tree_hash_subpath(
2532 treeish2_string,
2533 path,
2534 )
2535 except KeyError:
2536 subpath_tree_hash2 = None
2537
2538 return subpath_tree_hash1 == subpath_tree_hash2
2539
2540 @lru_cache()
2541 def quilt_env(self, treeish):
2542 """Return a suitable environment for running quilt.
2543
2544 This varies depending on the supplied commit since both
2545 debian/patches/series and debian/patches/debian.series may be valid.
2546 See dpkg-source(1) for details.
2547
2548 The returned environment includes all necessary variables by
2549 combining self.env with the needed quilt-specific environment.
2550
2551 :param pygit.Object treeish: object that peels to the pygit2.Tree on
2552 which quilt will operate.
2553 :rtype: dict
2554 :returns: an environment suitable for running quilt.
2555 """
2556 env = self.env.copy()
2557 env.update(quilt_env(self.raw_repo, treeish))
2558 return env
2559
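    # Sketch for quilt_env() above, assuming `repo` is a GitUbuntuRepository
    # and the commitish peels to the tree quilt should operate on:
    #
    #     commit = repo.get_commitish('pkg/ubuntu/devel')
    #     run_quilt(['series'], env=repo.quilt_env(commit))
    #
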
2560 def quilt_env_from_treeish_str(self, treeish_str):
2561 """Return a suitable environment for running quilt.
2562
2563 This is a thin wrapper around quilt_env() that works with a treeish hex
2564 string instead of directly with a treeish object.
2565
2566 :param str treeish_str: the hash of the tree on which quilt will
2567 operate, in hex.
2568 :rtype: dict
2569 :returns: an environment suitable for running quilt.
2570 """
2571 return self.quilt_env(self.raw_repo.get(treeish_str))
2572
2573 def is_patches_applied(self, commit_hash, regenerated_pc_path):
2574 # first see if quilt push -a would do anything to
2575 # differentiate between applied and unapplied
2576 with self.temporary_worktree(commit_hash):
2577 try:
2578 run_quilt(
2579 ['push', '-a'],
2580 env=self.quilt_env_from_treeish_str(commit_hash),
2581 )
2582 # False if in an unapplied state, which is signified by
2583 # successful push (rc=0)
2584 return False
2585 except CalledProcessError as e:
2586 # non-zero return might be an error or it might mean no
2587 # patches exist
2588 if e.returncode == 1:
2589 # an error may occur if we need to recreate the .pc
2590 # first
2591 try:
2592 # the first quilt push may have created a .pc/
2593 shutil.rmtree('.pc')
2594 shutil.copytree(
2595 regenerated_pc_path,
2596 '.pc',
2597 )
2598 except FileNotFoundError:
2599 # if there was no .pc directory, then the first
2600 # quilt push failure was a real error
2601 raise e
2602
2603 try:
2604 run_quilt(
2605 ['push', '-a'],
2606 env=self.quilt_env_from_treeish_str(commit_hash),
2607 )
2608 # False if in an unapplied state
2609 return False
2610 except CalledProcessError as e:
2611 # True if in a patches-applied state or
2612 # there are no patches to apply
2613 if e.returncode == 2:
2614 return True
2615 else:
2616 raise
2617 # True if in a patches-applied state or there are
2618 # no patches to apply
2619 elif e.returncode == 2:
2620 return True
2621 else:
2622 raise
2623
2624 def _maybe_quiltify_tree_hash(self, commit_hash):
2625 """Determine if quiltify is needed and yield the quiltify'd tree hash
2626
2627 The imported patches-applied trees do not contain .pc
2628 directories. To determine if an additional quilt patch is
2629 necessary, we have to first regenerate the .pc directory, then
2630 see if dpkg-source --commit generates a new quilt patch.
2631
2632 In order for dpkg-source --commit to function, we need to know
2633 if the commit we are building is patches-unapplied or
2634 patches-applied. In the latter case, we can build the commit
2635 directly after copying the regenerated .pc directory. In the
2636 former case, we do not want to copy the regenerated .pc
2637 directory, as dpkg-source will do this for us, as it applies the
2638 current patches. We determine if patches are applied or
2639 unapplied by relying on `quilt push -a`'s exit status at
2640 @commit_hash.
2641
2642 This is a common method used by multiple callers.
2643
2644 Arguments:
2645 @commit_hash: a string Git commit hash
2646
2647 Returns:
2648 String tree hash of quiltify'ing @commit_hash.
2649 If no quiltify is needed, the return value is @commit_hash's
2650 tree hash
2651 """
2652 commit_tree_hash = str(
2653 self.raw_repo.get(commit_hash).peel(pygit2.Tree).id
2654 )
2655 if not is_3_0_quilt(self, commit_hash):
2656 return commit_tree_hash
2657        # quilt and dpkg-source need the orig tarballs, and they must be in
2658        # the parent directory of where we extract; fetch them here, but
2659        # suppress any logging while doing so
2660 logger = logging.getLogger()
2661 oldLevel = logger.getEffectiveLevel()
2662 logger.setLevel(logging.WARNING)
2663 tarballs = gitubuntu.build.fetch_orig(
2664 orig_search_list=gitubuntu.build.derive_orig_search_list_from_args(
2665 self,
2666 commitish=commit_hash,
2667 for_merge=False,
2668 no_pristine_tar=False,
2669 ),
2670 changelog=Changelog.from_treeish(
2671 self.raw_repo,
2672 self.raw_repo.get(commit_hash)
2673 ),
2674 )
2675 logger.setLevel(oldLevel)
2676 # run dpkg-source
2677 with tempfile.TemporaryDirectory() as tempdir:
2678 # copy the generated tarballs
2679 new_tarballs = []
2680 for tarball in tarballs:
2681 new_tarballs.append(shutil.copy(tarball, tempdir))
2682 tarballs = new_tarballs
2683
2684 # create a nested temporary directory where we will recreate
2685 # the .pc directory
2686 with tempfile.TemporaryDirectory(prefix=tempdir+'/') as ttempdir:
2687 oldcwd = os.getcwd()
2688 os.chdir(ttempdir)
2689
2690 for tarball in tarballs:
2691 run(['tar', '-x', '--strip-components=1', '-f', tarball,])
2692
2693                # need debian/patches, so copy the whole debian/ directory
2694 shutil.copytree(
2695 os.path.join(self.local_dir, 'debian',),
2696 'debian',
2697 )
2698
2699 # generate the equivalent .pc directory
2700 run_quilt(
2701 ['push', '-a'],
2702 env=self.quilt_env_from_treeish_str(commit_hash),
2703 rcs=[2],
2704 )
2705
2706 regenerated_pc_path = os.path.join(tempdir, '.pc')
2707
2708 if os.path.exists(".pc"):
2709 shutil.copytree(
2710 '.pc',
2711 regenerated_pc_path,
2712 )
2713
2714 os.chdir(oldcwd)
2715
2716 patches_applied = self.is_patches_applied(
2717 commit_hash,
2718 regenerated_pc_path,
2719 )
2720
2721 with self.temporary_worktree(commit_hash, prefix=tempdir+'/'):
2722                # we only need to copy the generated .pc directory
2723                # if we are building a patches-applied tree, which
2724                # we determined above via is_patches_applied() (based
2725                # on the exit status of `quilt push -a`).
2726 if patches_applied:
2727 try:
2728 shutil.copytree(
2729 regenerated_pc_path,
2730 '.pc',
2731 )
2732 except FileNotFoundError:
2733 # it is possible no quilt patches exist yet
2734 pass
2735
2736 fixup_patch_path = os.path.join(
2737 'debian',
2738 'patches',
2739 'git-ubuntu-fixup.patch'
2740 )
2741
2742 if os.path.exists(fixup_patch_path):
2743 raise ValueError(
2744 "A quilt patch with the name git-ubuntu-fixup.patch "
2745 "already exists in %s" % commit_hash
2746 )
2747
2748 run(
2749 [
2750 'dpkg-source',
2751 '--commit',
2752 '.',
2753 'git-ubuntu-fixup.patch',
2754 ],
2755 env=self.quilt_env_from_treeish_str(commit_hash),
2756 )
2757
2758 # do not want the .pc directory in the resulting
2759 # treeish
2760 if os.path.exists('.pc'):
2761 shutil.rmtree('.pc')
2762
2763 if os.path.exists(fixup_patch_path):
2764 # dpkg-source uses debian/changelog to generate some
2765 # fields. We do not know yet if the changelog has
2766 # been updated, so elide that section of comments.
2767 with open(fixup_patch_path, 'r+') as f:
2768 for line in f:
2769 if '---' in line:
2770 break
2771 text = """Description: git-ubuntu generated quilt fixup patch
2772TODO: Put a short summary on the line above and replace this paragraph
2773with a longer explanation of this change. Complete the meta-information
2774with other relevant fields (see below for details).
2775---\n"""
2776 for line in f:
2777 text += line
2778 f.seek(0)
2779 f.write(text)
2780 f.truncate()
2781
2782 # If we are on a patches-unapplied tree, then we
2783 # need to reset ourselves back to @commit_hash with
2784 # our new patch.
2785 # In order for this to be buildable, we have to
2786                # reverse-apply our patch, to undo the git-committed
2787 # upstream changes.
2788 if not patches_applied:
2789 run(['git', 'add', '-f', 'debian/patches',])
2790 # if any patches add files that are untracked,
2791 # remove them
2792 run(['git', 'clean', '-f', '-d',])
2793 # reset all the other files to their status in
2794 # HEAD
2795 run(['git', 'checkout', commit_hash, '--', '*',])
2796 with open(fixup_patch_path, 'rb') as f:
2797 run(['patch', '-Rp1',], input=f.read())
2798
2799 return self.dir_to_tree(self.raw_repo, '.')
2800 else:
2801 return commit_tree_hash
2802
2803 def maybe_quiltify_tree_hash(self, commitish_string):
2804 """Determine if quiltify is needed and return the quiltify'd tree hash
2805
2806 See _maybe_quiltify_tree_hash for details.
2807
2808 Arguments:
2809 @commitish_string: a string Git commitish
2810
2811 Returns:
2812 String tree hash of quiltify'ing @commitish_string.
2813 If no quiltify is needed, the return value is the tree hash of
2814 @commitish_string.
2815 """
2816 commit_hash = str(
2817 self.get_commitish(commitish_string).peel(pygit2.Commit).id
2818 )
2819 return self._maybe_quiltify_tree_hash(commit_hash)
2820
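    # Sketch for maybe_quiltify_tree_hash() above (the branch name is
    # illustrative and `repo` is an assumed GitUbuntuRepository instance):
    #
    #     tree_hash = repo.maybe_quiltify_tree_hash('someuser/fix-lp1234567')
    #     # equals the commitish's own tree hash when no fixup patch is needed
    #
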
2821 def maybe_changelogify_tree_hash(self, commit_hash):
2822 """Determine if changelogify is needed and yield the changelogify'd tree hash
2823
2824 Given a commit, we need to detect if the user has inserted a
2825 changelog entry relative to a published version for the purpose
2826 of test builds.
2827
2828 Arguments:
2829 @commit_hash: a string Git commit hash
2830
2831 Returns:
2832 String tree hash of changelogify'ing @commit_hash.
2833 If no changelogify is needed, the return value is the tree hash of
2834 @commit_hash.
2835 """
2836 commit_tree_hash = str(
2837 self.raw_repo.get(commit_hash).peel(pygit2.Tree).id
2838 )
2839
2840        # one of these is the "base" pkg that @commit_hash's changes
2841 # are based on
2842 remote_tag = self.nearest_tag(
2843 commit_hash,
2844 prefix='pkg/',
2845 )
2846 remote_branch = derive_target_branch(
2847 self,
2848 commit_hash,
2849 )
2850
2851 assert remote_tag or remote_branch
2852
2853 if remote_tag:
2854 if remote_branch:
2855 try:
2856 self.git_run(
2857 [
2858 'merge-base',
2859 '--is-ancestor',
2860 remote_tag.name,
2861 remote_branch,
2862 ],
2863 verbose_on_failure=False,
2864 )
2865 parent_ref = remote_branch
2866 except CalledProcessError as e:
2867 if e.returncode == 1:
2868 parent_ref = remote_tag.name
2869 else:
2870 raise
2871 else:
2872 parent_ref = remote_tag.name
2873 else:
2874 parent_ref = remote_branch
2875
2876 # If there are any changes relative to parent_ref but there are
2877 # not any changelog changes, insert a snapshot changelog entry,
2878 # starting from parent_ref, and return the resulting tree hash.
2879 if str(self.raw_repo.revparse_single(parent_ref).peel(
2880 pygit2.Tree
2881 ).id) != commit_tree_hash and self.paths_are_identical(
2882 parent_ref,
2883 commit_hash,
2884 'debian/changelog',
2885 ):
2886 with self.temporary_worktree(commit_hash):
2887 run_gbp(
2888 [
2889 'dch',
2890 '--snapshot',
2891 '--ignore-branch',
2892 '--since=%s' % str(parent_ref),
2893 ],
2894 env=self.env,
2895 )
2896 return self.dir_to_tree(self.raw_repo, '.')
2897
2898 # otherwise, return @commit_hash's tree hash
2899 return commit_tree_hash
2900
2901 def quiltify_and_changelogify_tree_hash(self, commitish_string):
2902 """Given a commitish, possibly quiltify and changelogify its tree
2903
2904 Definitions:
2905 quiltify: generate a quilt patch from untracked upstream
2906 changes
2907 changelogify: generate a snapshot changelog entry if any
2908 changes exist, and no new changelog entry yet exists
2909
2910 Arguments:
2911 @commitish_string: string Git commitish
2912
2913 Returns:
2914 string Git tree hash of quiltify-ing and changelogify-ing
2915 @commitish_string, if needed
2916 if neither quiltify or changelogify are needed, return
2917 @commitish_string's tree hash
2918 """
2919 commit_hash = str(
2920 self.get_commitish(commitish_string).peel(pygit2.Commit).id
2921 )
2922 quiltify_tree_hash = self._maybe_quiltify_tree_hash(commit_hash)
2923 changelogify_tree_hash = self.maybe_changelogify_tree_hash(commit_hash)
2924
2925 quiltify_tree_obj = self.raw_repo.get(quiltify_tree_hash)
2926 changelogify_tree_obj = self.raw_repo.get(changelogify_tree_hash)
2927
2928 # There are multiple ways to solve this problem, but the
2929 # simplest is to use a TreeBuilder to merge the quiltify tree
2930 # with the changelog from the changelogify tree
2931 # top-level TreeBuilder
2932 tb = self.raw_repo.TreeBuilder(quiltify_tree_obj)
2933 te = tb.get('debian')
2934 # TreeBuilder for debian/
2935 dtb = self.raw_repo.TreeBuilder(self.raw_repo.get(te.id))
2936 dtb.insert( # does not take kwargs
2937 'changelog', # name
2938 changelogify_tree_obj['debian/changelog'].oid, # oid
2939 pygit2.GIT_FILEMODE_BLOB, # attr
2940 )
2941 # insert can replace
2942 tb.insert( # does not take kwargs
2943 'debian', # name
2944 dtb.write(), # oid
2945 pygit2.GIT_FILEMODE_TREE, # attr
2946 )
2947 return str(tb.write())
2948
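    # Sketch for quiltify_and_changelogify_tree_hash() above, e.g. to obtain
    # the tree that a test build should use (`repo` and the branch name are
    # assumptions for illustration):
    #
    #     build_tree = repo.quiltify_and_changelogify_tree_hash(
    #         'someuser/fix-lp1234567',
    #     )
    #
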
2949 def find_ubuntu_merge_base(
2950 self,
2951 ubuntu_commitish,
2952 ):
2953 """Find the Ubuntu merge point for a given Ubuntu version
2954
2955        :param str ubuntu_commitish: a commitish describing the latest
2956            Ubuntu commit
2957
2958        :rtype: str
2959        :returns: commit hash of import of Debian version
2960            @ubuntu_commitish is based on. The imported Debian version
2961            must be an ancestor of @ubuntu_commitish. If no suitable
2962            commit is found, an empty string is returned.
2963 """
2964 merge_base_tag = None
2965
2966 # obtain the nearest imported Debian version per the changelog
2967 for version in self.get_all_changelog_versions_from_treeish(
2968 ubuntu_commitish,
2969 ):
2970 # extract corresponding Debian version
2971 debian_parts, _ = gitubuntu.versioning.split_version_string(
2972 version
2973 )
2974 expected_debian_version = "".join(debian_parts)
2975
2976 # We do not currently handle the case of a Debian version
2977 # being reimported. I think the proper way to support that
2978 # would be to add a parameter to `git ubuntu merge` for the
2979 # user to tell us which reimport tag is the one the Ubuntu
2980 # delta is based on.
2981 merge_base_tag = self.get_import_tag(
2982 expected_debian_version,
2983 'pkg',
2984 )
2985
2986 if merge_base_tag:
2987 assert not self.get_all_reimport_tags(
2988 expected_debian_version,
2989 'pkg',
2990 )
2991 break
2992
2993 if not merge_base_tag:
2994 logging.error(
2995 "Unable to find an import tag for any Debian version "
2996 "in %s:debian/changelog.",
2997 ubuntu_commitish,
2998 )
2999 return ''
3000
3001 merge_base_commit_hash = str(merge_base_tag.peel(pygit2.Commit).id)
3002
3003 try:
3004 self.git_run(
3005 [
3006 'merge-base',
3007 '--is-ancestor',
3008 merge_base_commit_hash,
3009 ubuntu_commitish,
3010 ],
3011 verbose_on_failure=False,
3012 )
3013 except CalledProcessError as e:
3014 if e.returncode != 1:
3015 raise
3016 logging.error(
3017 "Found an import tag for %s (commit: %s), but it is "
3018 "not an ancestor of %s.",
3019 expected_debian_version,
3020 merge_base_commit_hash,
3021 ubuntu_commitish,
3022 )
3023 return ''
3024
3025 return merge_base_commit_hash
3026>>>>>>> gitubuntu/git_repository.py
diff --git a/gitubuntu/git_repository_test.py b/gitubuntu/git_repository_test.py
new file mode 100644
index 0000000..72055f9
--- /dev/null
+++ b/gitubuntu/git_repository_test.py
@@ -0,0 +1,1191 @@
1<<<<<<< gitubuntu/git_repository_test.py
2=======
3import copy
4import datetime
5import itertools
6import os
7import pkg_resources
8import shutil
9import tempfile
10import unittest
11import unittest.mock
12
13import pygit2
14import pytest
15
16import gitubuntu.git_repository as target
17from gitubuntu.git_repository import HeadInfoItem
18from gitubuntu.repo_builder import (
19 Blob,
20 Commit,
21 Placeholder,
22 Repo,
23 SourceTree,
24 Symlink,
25 Tree,
26)
27from gitubuntu.source_builder import Source, SourceSpec
28import gitubuntu.spec
29from gitubuntu.test_fixtures import (
30 repo,
31 pygit2_repo,
32)
33from gitubuntu.test_util import get_test_changelog
34
35
36@pytest.mark.parametrize('same_remote_branch_names, different_remote_branch_names, expected', [
37 ([], [], ''),
38 (['pkg/ubuntu/xenial-devel',], [], 'pkg/ubuntu/xenial-devel'),
39 (['pkg/ubuntu/xenial-security',], [], 'pkg/ubuntu/xenial-security'),
40 (['pkg/ubuntu/xenial-updates', 'pkg/ubuntu/xenial-devel'], [],
41 'pkg/ubuntu/xenial-devel'
42 ),
43 ([], ['pkg/ubuntu/xenial-updates', 'pkg/ubuntu/xenial-devel'],
44 ''
45 ),
46 (['pkg/ubuntu/zesty-devel', 'pkg/ubuntu/zesty-proposed', 'pkg/ubuntu/devel'], [], 'pkg/ubuntu/devel'),
47])
48def test__derive_target_branch_string(same_remote_branch_names,
49 different_remote_branch_names, expected
50):
51 remote_branch_objects = []
52 for branch_name in same_remote_branch_names:
53 b = unittest.mock.Mock()
54 b.peel(pygit2.Tree).id = unittest.mock.sentinel.same_id
55 b.branch_name = branch_name
56 remote_branch_objects.append(b)
57 for branch_name in different_remote_branch_names:
58 b = unittest.mock.Mock()
59 b.peel(pygit2.Tree).id = object() # need a different sentinel for each
60 b.branch_name = branch_name
61 remote_branch_objects.append(b)
62 target_branch_string = target._derive_target_branch_string(
63 remote_branch_objects
64 )
65 assert target_branch_string == expected
66
67
68@pytest.mark.parametrize('changelog_name, expected', [
69 ('test_versions_1', ['1.0', None]),
70 ('test_versions_2', ['2.0', '1.0']),
71 ('test_versions_3', ['4.0', '3.0']),
72 ('test_versions_unknown', ['ss-970814-1', None]),
73])
74def test_changelog_versions(changelog_name, expected):
75 test_changelog = get_test_changelog(changelog_name)
76 assert [test_changelog.version, test_changelog.previous_version] == expected
77
78
79@pytest.mark.parametrize('changelog_name, expected', [
80 ('test_versions_unknown', ['ss-970814-1',]),
81])
82def test_changelog_all_versions(changelog_name, expected):
83 test_changelog = get_test_changelog(changelog_name)
84 assert test_changelog.all_versions == expected
85
86
87def test_changelog_distribution():
88 test_changelog = get_test_changelog('test_distribution')
89 assert test_changelog.distribution == 'xenial'
90
91
92def test_changelog_date():
93 test_changelog = get_test_changelog('test_date_1')
94 assert test_changelog.date == 'Mon, 12 May 2016 08:14:34 -0700'
95 test_changelog = get_test_changelog('test_date_2')
96 assert test_changelog.date == 'Mon, 12 May 2016 08:14:34 -0700'
97
98
99@pytest.mark.parametrize('changelog_name, expected', [
100 ('test_maintainer_1', 'Test Maintainer <test-maintainer@donotmail.com>'),
101 ('test_maintainer_2', '<test-maintainer@donotmail.com>'),
102])
103def test_changelog_maintainer(changelog_name, expected):
104 test_changelog = get_test_changelog(changelog_name)
105 assert test_changelog.maintainer == expected
106
107
108def test_changelog_maintainer_invalid():
109 with pytest.raises(ValueError):
110 test_changelog = get_test_changelog('test_maintainer_3')
111 test_changelog.maintainer
112
113
114def test_changelog_multiple_angle_brackets():
115 """An email address with extra angle brackets should still parse"""
116 test_changelog = get_test_changelog('test_multiple_angle_brackets')
117 assert test_changelog.git_authorship()[1] == 'micah@debian.org'
118
119
120@pytest.mark.parametrize(['input_date_string', 'expected_result'], [
121 # The normal complete form
122 ('Mon, 12 May 2016 08:14:34 -0700', (2016, 5, 12, 8, 14, 34, -7)),
123 # Day of week missing, such as in:
124 # datefudge 1.12
125 ('12 May 2016 08:14:34 -0700', (2016, 5, 12, 8, 14, 34, -7)),
126 # Full (not abbreviated) month name, such as in:
127 # dnsmasq 2.32-2
128 # dropbear 0.42-1
129 # e2fsprogs 1.42.11-1
130 # efibootmgr 0.5.4-7
131 # hunspell-br 0.11-1
132 # kubuntu-default-settings 1:6.06-22
133 # libvformat 1.13-4
134 ('12 June 2016 08:14:34 -0700', (2016, 6, 12, 8, 14, 34, -7)),
135 # Full (not abbreviated) day of week name, such as in:
136 # logcheck 1.2.22a
137 ('Thursday, 15 May 2016 08:14:34 -0700', (2016, 5, 15, 8, 14, 34, -7)),
138 # Part-abbreviated day of week name, such as in:
139 # kubuntu-meta 1.76
140 ('Thur, 15 May 2016 08:14:34 -0700', (2016, 5, 15, 8, 14, 34, -7)),
141])
142def test_parse_changelog_date(input_date_string, expected_result):
143 """_parse_changelog_date should parse a basic date string correctly
144
145 :param str input_date_string: the timestamp part of the changelog signoff
146 line
147 :param tuple(int, int, int, int, int, int, int) expected_result: the
148 expected parse result in (year, month, day, hour, minute, second,
149 timezone_offset_in_hours) form. The actual expected result needs to be
150 a datetime.datetime object; to avoid duplication in test parameters
151 this will be instantiated within the test.
152 """
153 actual_result = target.Changelog._parse_changelog_date(input_date_string)
154 expected_result_datetime = datetime.datetime(
155 *expected_result[:6],
156 tzinfo=datetime.timezone(datetime.timedelta(hours=expected_result[6])),
157 )
158 assert actual_result == expected_result_datetime
159
160
161@pytest.mark.parametrize(['input_date_string'], [
162 ('Mon, 30 Feb 2020 15:50:58 +0200',), # ghostscript 9.50~dfsg-5ubuntu4
163 ('Mon, 03 Sep 2018 00:43:25 -7000',), # lxqt-config 0.13.0-0ubuntu4
164 ('Tue, 17 May 2008 10:93:55 -0500',), # iscsitarget
165 # 0.4.15+svn148-2.1ubuntu1
166 ('Monu, 22 Jan 2007 22:10:50 -0500',), # mail-spf-perl 2.004-0ubuntu1
167 ('Wed, 29 Augl 2007 16:14:11 +0200',), # nut 2.2.0-2
168])
169def test_changelog_date_parse_errors(input_date_string):
170 """_parse_changelog_date should raise ValueError on illegal dates
171
172 :param str input_date_string: the timestamp part of the changelog signoff
173 line
174 """
175 with pytest.raises(ValueError):
176 target.Changelog._parse_changelog_date(input_date_string)
177
178
179@pytest.mark.parametrize(
180 'changelog_name, name, email, epoch_seconds, offset', [
181 (
182 'test_maintainer_1',
183 'Test Maintainer',
184 'test-maintainer@donotmail.com',
185 0,
186 0,
187 ),
188 (
189 'test_maintainer_2',
190 'Unnamed', # git won't handle empty names; see the spec
191 'test-maintainer@donotmail.com',
192 0,
193 0,
194 ),
195 (
196 'test_date_1',
197 'Test Maintainer',
198 'test-maintainer@donotmail.com',
199 1463066074,
200 -420,
201 ),
202 (
203 'test_date_2',
204 'Test Maintainer',
205 'test-maintainer@donotmail.com',
206 1463066074,
207 -420,
208 ),
209 (
210 'maintainer_name_leading_space',
211 'Test Maintainer',
212 'test-maintainer@example.com',
213 0,
214 0,
215 ),
216 (
217 'maintainer_name_trailing_space',
218 'Test Maintainer',
219 'test-maintainer@example.com',
220 0,
221 0,
222 ),
223 (
224 'maintainer_name_inner_space',
225 'Test Maintainer',
226 'test-maintainer@example.com',
227 0,
228 0,
229 ),
230])
231def test_changelog_authorship(
232 changelog_name,
233 name,
234 email,
235 epoch_seconds,
236 offset,
237):
238 result = get_test_changelog(changelog_name).git_authorship()
239 assert result == (name, email, epoch_seconds, offset)
240
241
242def test_changelog_utf8():
243 test_changelog = get_test_changelog('test_utf8_error')
244 assert test_changelog.version == '1.0.3-2'
245
246
247def test_changelog_duplicate():
248 # Changelog.all_versions should successfully return without an assertion
249
250 # Xenial's dpkg-parsechangelog eliminates duplicate versions. Bionic's
251 # dpkg-parsechangelog does not. We rely on the behaviour of
252 # dpkg-parsechangelog from Bionic, where this test passes. The test fails
253 # when using Xenial's dpkg-parsechangelog, where its behaviour doesn't
254 # match our assumptions elsewhere.
255
256 # -with-extra includes an extra changelog entry at the end. This is
257 # currently needed to trip the assertion because it truncates the longer
258 # list before its comparison. This will get fixed in a subsequent commit,
259 # but using it here ensures that this test will correctly trip regardless
260 # of the presence of that unrelated bug.
261 test_changelog = get_test_changelog('duplicate-version-with-extra')
262 test_changelog.all_versions
263
264
265def test_changelog_all_versions_assertion_mismatched_length():
266 # if Changelog.all_versions finds that self._changelog.versions mismatches
267 # self._shell_all_versions, it is supposed to raise an assertion. Here is
268 # an edge case where at one point in development it did not. We fake both
269 # _changelog.versions and _shell_all_versions to an edge case where they
270 # mismatch.
271 with unittest.mock.patch(
272 'gitubuntu.git_repository.Changelog._shell_all_versions',
273 new_callable=unittest.mock.PropertyMock
274 ) as mock_shell_all_versions:
275 mock_shell_all_versions.return_value = ['a']
276 test_changelog = target.Changelog(b'')
277 test_changelog._changelog = unittest.mock.Mock()
278 test_changelog._changelog.versions = ['a', 'b']
279 with pytest.raises(target.ChangelogError):
280 test_changelog.all_versions
281
282
283@pytest.mark.parametrize('tree_func', [
284 # The tree_func parameter is a function that accepts a mock Blob that is to
285 # represent the changelog blob itself and returns a mock Tree with the mock
286 # Blob embedded somewhere within it. The test function can then ensure that
287 # follow_symlinks_to_blob can correctly find the changelog Blob given the
288 # Tree.
289
290 # Of course this is only expected to work if, after checking out the Tree,
291 # "cat debian/changelog" would work. But this allows us to test the various
292 # permutations of symlink following in Trees that _are_ valid.
293
294 # Simple case
295 lambda b: Tree({
296 'debian': Tree({'changelog': b}),
297 }),
298
299 # Symlink in debian/
300 lambda b: Tree({
301 'debian': Tree({
302 'changelog.real': b,
303 'changelog': Symlink('changelog.real'),
304 }),
305 }),
306
307 # Symlink to parent directory
308 lambda b: Tree({
309 'changelog': b,
310 'debian': Tree({
311 'changelog': Symlink('../changelog'),
312 })
313 }),
314
315 # Symlink to subdirectory
316 lambda b: Tree({
317 'debian': Tree({
318 'changelog': Symlink('subdirectory/changelog'),
319 'subdirectory': Tree({'changelog': b}),
320 })
321 }),
322
323 # debian/ itself is a symlink to a different directory
324 lambda b: Tree({
325 'pkg': Tree({'changelog': b}),
326 'debian': Symlink('pkg'),
327 })
328])
329def test_follow_symlinks_to_blob(pygit2_repo, tree_func):
330 blob = Blob(b'')
331 blob_id = blob.write(pygit2_repo)
332 tree = pygit2_repo.get(tree_func(blob).write(pygit2_repo))
333 result_blob = target.follow_symlinks_to_blob(
334 pygit2_repo,
335 tree,
336 'debian/changelog',
337 )
338 assert result_blob.id == blob_id
339
340
341@pytest.mark.parametrize('tree', [
342 Tree({}),
343 Tree({'debian': Tree({})}),
344 Tree({'debian': Tree({'changelog': Symlink('other')})}),
345 Tree({'debian': Tree({'changelog': Symlink('../other')})}),
346])
347def test_follow_symlinks_to_blob_not_found(pygit2_repo, tree):
348 pygit2_tree = pygit2_repo.get(tree.write(pygit2_repo))
349 with pytest.raises(KeyError):
350 target.follow_symlinks_to_blob(
351 pygit2_repo,
352 pygit2_tree,
353 'debian/changelog',
354 )
355
356
357def test_renameable_dir_basename(tmpdir):
358 p = tmpdir.join('foo')
359 p.ensure()
360 rd = target.RenameableDir(str(p))
361 assert rd.basename == 'foo'
362
363
364def test_renameable_dir_basename_setter(tmpdir):
365 p = tmpdir.join('foo')
366 p.ensure()
367 rd = target.RenameableDir(str(p))
368 rd.basename = 'bar'
369 assert rd.basename == 'bar'
370 assert tmpdir.join('bar').check()
371
372
373def test_dot_git_match(tmpdir):
374 for name in ['.git', 'git', '..git', 'other']:
375 tmpdir.join(name).ensure()
376
377 result = set(
378 x.basename
379 for x in tmpdir.listdir(
380 fil=lambda x: target._dot_git_match(str(x.basename))
381 )
382 )
383 assert result == set(['.git', '..git'])
384
385
386def test_renameable_dir_listdir(tmpdir):
387 for name in ['.git', 'git', '..git', 'other']:
388 tmpdir.join(name).ensure()
389 rd = target.RenameableDir(str(tmpdir))
390 result = set(rd.listdir(target._dot_git_match))
391 assert result == set([
392 target.RenameableDir(os.path.join(str(tmpdir), '.git')),
393 target.RenameableDir(os.path.join(str(tmpdir), '..git')),
394 ])
395
396
397def test_renameable_dir_recursive(tmpdir):
398 a = tmpdir.join('foo')
399 a.ensure_dir()
400 b = tmpdir.join('bar')
401 b.ensure()
402 assert target.RenameableDir(str(a)).recursive
403 assert not target.RenameableDir(str(b)).recursive
404
405
406def test_renameable_dir_recursive_symlink_directory(tmpdir):
407 """A RenameableDir should not treat a broken symlink as recursive"""
408 test_symlink = tmpdir.join('foo')
409 nonexistent_file = tmpdir.join('nonexistent_file')
410 test_symlink.mksymlinkto(nonexistent_file)
411 assert not target.RenameableDir(str(test_symlink)).recursive
412
413
414def test_renameable_dir_str(tmpdir):
415 p = tmpdir.join('foo')
416 p.ensure()
417 rd = target.RenameableDir(str(p))
418 assert str(rd) == os.path.join(str(tmpdir), 'foo')
419
420
421def test_renameable_dir_repr(tmpdir):
422 p = tmpdir.join('foo')
423 p.ensure()
424 rd = target.RenameableDir(str(p))
425 assert repr(rd) == ("RenameableDir('%s/foo')" % str(tmpdir))
426
427
428def test_renameable_dir_hash_eq(tmpdir):
429 p1a = tmpdir.join('foo')
430 p1b = tmpdir.join('foo')
431 p2 = tmpdir.join('bar')
432
433 p1a.ensure()
434 p2.ensure()
435
436 rd1a = target.RenameableDir(str(p1a))
437 rd1b = target.RenameableDir(str(p1b))
438 rd2 = target.RenameableDir(str(p2))
439
440 assert rd1a == rd1b
441 assert rd1a != rd2
442
443
444def test_renameable_dir_must_exist(tmpdir):
445 """A RenameableDir should reject a path that doesn't exist"""
446 with pytest.raises(FileNotFoundError):
447 target.RenameableDir(tmpdir.join('a'))
448
449
450def test_fake_renameable_dir_basename():
451 path = target.FakeRenameableDir('foo', None)
452 assert path.basename == 'foo'
453
454
455def test_fake_renameable_dir_basename_setter():
456 path = target.FakeRenameableDir('foo', None)
457 path.basename = 'bar'
458 assert path.basename == 'bar'
459
460
461def test_fake_renameable_dir_listdir():
462 path = target.FakeRenameableDir(None, [
463 target.FakeRenameableDir('.git', None),
464 target.FakeRenameableDir('git', None),
465 target.FakeRenameableDir('..git', None),
466 target.FakeRenameableDir('other', None),
467 ])
468 result = set(x.basename for x in path.listdir(fil=target._dot_git_match))
469 assert result == set(['.git', '..git'])
470
471
472def test_fake_renameable_dir_recursive():
473    assert target.FakeRenameableDir('foo', []).recursive
474    assert not target.FakeRenameableDir('foo', None).recursive
475
476
477def test_fake_renameable_dir_hash_eq():
478 variations = [
479 target.FakeRenameableDir(None, None),
480 target.FakeRenameableDir(None, []),
481 target.FakeRenameableDir('foo', []),
482 target.FakeRenameableDir(None, [
483 target.FakeRenameableDir('foo', None)]
484 ),
485 target.FakeRenameableDir(None, [
486 target.FakeRenameableDir('foo', [
487 target.FakeRenameableDir('bar', None)
488 ]),
489 ]),
490 ]
491 for a, b in itertools.product(variations, variations):
492 if a is b:
493 assert a == b
494 else:
495 assert a != b
496
497
498def test_fake_renameable_dir_repr():
499 rd = target.FakeRenameableDir('foo', [target.FakeRenameableDir('bar', [])])
500 assert (
501 repr(rd) == "FakeRenameableDir('foo', [FakeRenameableDir('bar', [])])"
502 )
503
504
505@pytest.mark.parametrize('initial,expected', [
506 # Empty directory remains unchanged
507 (
508 target.FakeRenameableDir(None, []),
509 target.FakeRenameableDir(None, []),
510 ),
511 # Basic .git -> ..git escape
512 (
513 target.FakeRenameableDir(
514 None,
515 [target.FakeRenameableDir('.git', None)],
516 ),
517 target.FakeRenameableDir(
518 None,
519 [target.FakeRenameableDir('..git', None)],
520 ),
521 ),
522 # .git contains a .git
523 (
524 target.FakeRenameableDir(
525 None,
526 [
527 target.FakeRenameableDir(
528 '.git',
529 [target.FakeRenameableDir('.git', None)],
530 )
531 ],
532 ),
533 target.FakeRenameableDir(
534 None,
535 [
536 target.FakeRenameableDir(
537 '..git',
538 [target.FakeRenameableDir('..git', None)],
539 )
540 ],
541 ),
542 ),
543 # git remains unchanged
544 (
545 target.FakeRenameableDir(
546 None,
547 [target.FakeRenameableDir('git', None)],
548 ),
549 target.FakeRenameableDir(
550 None,
551 [target.FakeRenameableDir('git', None)],
552 ),
553 ),
554 # .git and ..git both exist
555 (
556 target.FakeRenameableDir(
557 None,
558 [
559 target.FakeRenameableDir('.git', None),
560 target.FakeRenameableDir('..git', None),
561 ],
562 ),
563 target.FakeRenameableDir(
564 None,
565 [
566 target.FakeRenameableDir('..git', None),
567 target.FakeRenameableDir('...git', None),
568 ],
569 ),
570 ),
571 # Ordinary directory contains a .git
572 (
573 target.FakeRenameableDir(
574 None,
575 [
576 target.FakeRenameableDir(
577 'foo',
578 [target.FakeRenameableDir('.git', None)],
579 )
580 ]
581 ),
582 target.FakeRenameableDir(
583 None,
584 [
585 target.FakeRenameableDir(
586 'foo',
587 [target.FakeRenameableDir('..git', None)],
588 )
589 ]
590 ),
591 ),
592])
593def test_escape_dot_git(initial, expected):
594 state = copy.deepcopy(initial)
595 # Once escaped, we should get to what was expected
596 target._escape_unescape_dot_git(state, target._EscapeDirection.ESCAPE)
597 assert state == expected
598 # Once unescaped, we should get back to where we started since the escaping
599 # mechanism is lossless.
600 target._escape_unescape_dot_git(state, target._EscapeDirection.UNESCAPE)
601 assert state == initial
602
603
604def test_unescape_dot_git_raises():
605 """Test that unescaping something with '.git' raises an exception."""
606 with pytest.raises(RuntimeError):
607 target._escape_unescape_dot_git(
608 target.FakeRenameableDir(
609 None,
610 [target.FakeRenameableDir('.git', None)],
611 ),
612 direction=target._EscapeDirection.UNESCAPE,
613 )
614
615
616@pytest.mark.parametrize('direction', [
617 target._EscapeDirection.ESCAPE,
618 target._EscapeDirection.UNESCAPE,
619])
620def test_escape_dot_git_ordering(direction):
621 """Test that renames happen in the correct order.
622
623 ...git -> ....git must happen before ..git -> ...git to avoid a collision,
624 and vice versa in the unescape case.
625 """
626 # Avoid '.git' as it isn't valid in the reverse direction
627 inner2 = target.FakeRenameableDir('..git', None)
628 inner3 = target.FakeRenameableDir('...git', None)
629 inputs = [inner2, inner3]
630 if direction is target._EscapeDirection.ESCAPE:
631 expected_order = [inner3, inner2]
632 else:
633 expected_order = [inner2, inner3]
634 for given_order in [inputs, reversed(inputs)]:
635 top = target.FakeRenameableDir(None, given_order)
636 target._escape_unescape_dot_git(top, direction)
637 assert all(x is y for x, y in zip(top._rename_record, expected_order))
638
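The ordering constraint can be shown in isolation. The helper below is hypothetical and only models the rule that every name of the form '.'*n + 'git' gains one leading dot on escape: processing the longest names first guarantees the target name is not yet taken, and unescaping needs the opposite order.

def escape_order(names):
    # Names that could collide are exactly those of the form '.'*n + 'git';
    # renaming the longest first means the target name is always free.
    dot_git = [n for n in names if n.startswith('.') and n.lstrip('.') == 'git']
    return sorted(dot_git, key=len, reverse=True)

assert escape_order(['..git', '...git', 'git', 'other']) == ['...git', '..git']
# For unescaping, the same names would be processed in ascending order instead.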
639
640def test_empty_dir_to_tree(pygit2_repo, tmpdir):
641 tree_hash = target.GitUbuntuRepository.dir_to_tree(
642 pygit2_repo,
643 str(tmpdir),
644 )
645 assert tree_hash == str(Tree({}).write(pygit2_repo))
646
647
648def test_onefile_dir_to_tree(pygit2_repo, tmpdir):
649 tmpdir.join('foo').write('bar')
650 tree_hash = target.GitUbuntuRepository.dir_to_tree(
651 pygit2_repo,
652 str(tmpdir),
653 )
654 assert tree_hash == str(Tree({'foo': Blob(b'bar')}).write(pygit2_repo))
655
656
657def test_git_escape_dir_to_tree(pygit2_repo, tmpdir):
658 tmpdir.mkdir('.git')
659 tree_hash = target.GitUbuntuRepository.dir_to_tree(
660 pygit2_repo,
661 str(tmpdir),
662 escape=True,
663 )
664 assert tree_hash == str(Tree({'..git': Tree({})}).write(pygit2_repo))
665
666
667@pytest.mark.parametrize('tree_data,expected_path', [
668 # Empty tree -> default
669 (Tree({}), 'debian/patches/series'),
670
671 # Empty debian/patches directory -> default
672 (Tree({'debian': Tree({'patches': Tree({})})}), 'debian/patches/series'),
673
674 # Only debian/patches/series -> that one
675 (
676 Tree({'debian': Tree({'patches': Tree({'series': Blob(b'')})})}),
677 'debian/patches/series',
678 ),
679
680 # Only debian/patches/debian.series -> that one
681 (
682 Tree({'debian': Tree({'patches': Tree({
683 'debian.series': Blob(b'')
684 })})}),
685 'debian/patches/debian.series',
686 ),
687
688 # Both -> debian.series
689 (
690 Tree({'debian': Tree({'patches': Tree({
691 'debian.series': Blob(b''),
692 'series': Blob(b''),
693 })})}),
694 'debian/patches/debian.series',
695 ),
696])
697def test_determine_quilt_series_path(pygit2_repo, tree_data, expected_path):
698 tree_obj = pygit2_repo.get(tree_data.write(pygit2_repo))
699 path = target.determine_quilt_series_path(pygit2_repo, tree_obj)
700 assert path == expected_path
701
702
703def test_quilt_env(pygit2_repo):
704 tree_builder = Tree({'debian':
705 Tree({'patches': Tree({'debian.series': Blob(b'')})})
706 })
707 tree_obj = pygit2_repo.get(tree_builder.write(pygit2_repo))
708 env = target.quilt_env(pygit2_repo, tree_obj)
709 assert env == {
710 'EDITOR': 'true',
711 'QUILT_NO_DIFF_INDEX': '1',
712 'QUILT_NO_DIFF_TIMESTAMPS': '1',
713 'QUILT_PATCHES': 'debian/patches',
714 'QUILT_SERIES': 'debian/patches/debian.series',
715 }
716
717
718def test_repo_quilt_env(repo):
719 tree_builder = Tree({'debian':
720 Tree({'patches': Tree({'debian.series': Blob(b'')})})
721 })
722 tree_obj = repo.raw_repo.get(tree_builder.write(repo.raw_repo))
723 env = repo.quilt_env(tree_obj)
724 expected_inside = {
725 'EDITOR': 'true',
726 'QUILT_NO_DIFF_INDEX': '1',
727 'QUILT_NO_DIFF_TIMESTAMPS': '1',
728 'QUILT_PATCHES': 'debian/patches',
729 'QUILT_SERIES': 'debian/patches/debian.series',
730 }
731 for k, v in expected_inside.items():
732 assert env[k] == v
733
734 # In addition to the settings above, check that
735 # GitUbuntuRepository.quilt_env has correctly merged in the usual
736    # environment. Checking a few keys that we expect to be present should
737    # suffice.
738 expected_other_keys = ['HOME', 'GIT_DIR', 'GIT_WORK_TREE']
739 for k in expected_other_keys:
740 assert env[k]
741
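For context, the variables asserted in these quilt_env tests are settings that quilt(1) honours (normally via ~/.quiltrc or the environment); a caller would typically overlay them on the inherited environment before invoking quilt, roughly as in this hypothetical helper (not code from this branch):

import os
import subprocess

def run_quilt(args, quilt_settings, cwd):
    # Overlay the quilt-specific settings on the inherited environment so
    # quilt still sees HOME, PATH, etc.
    env = dict(os.environ)
    env.update(quilt_settings)
    return subprocess.run(['quilt'] + list(args), env=env, cwd=cwd, check=True)

# e.g. run_quilt(['push', '-a'], quilt_settings, working_tree) applies every
# patch listed in the series file named by QUILT_SERIES.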
742
743def test_repo_quilt_env_from_treeish_str(repo):
744 tree_builder = Tree({'debian':
745 Tree({'patches': Tree({'debian.series': Blob(b'')})})
746 })
747 tree_obj = repo.raw_repo.get(tree_builder.write(repo.raw_repo))
748 env = repo.quilt_env_from_treeish_str(str(tree_obj.id))
749 expected_inside = {
750 'EDITOR': 'true',
751 'QUILT_NO_DIFF_INDEX': '1',
752 'QUILT_NO_DIFF_TIMESTAMPS': '1',
753 'QUILT_PATCHES': 'debian/patches',
754 'QUILT_SERIES': 'debian/patches/debian.series',
755 }
756 for k, v in expected_inside.items():
757 assert env[k] == v
758
759
760def test_repo_derive_env_change(repo):
761    # Mutating the dictionary returned by the env attribute of a
762    # GitUbuntuRepository instance must not affect what subsequent accesses of
763    # env return. This goes a little beyond what a normal instance property
764    # guarantees, but it is worth enforcing because this attribute is
765    # especially prone to being mutated by callers (sketched below).
766 e1 = repo.env
767 e1[unittest.mock.sentinel.k] = unittest.mock.sentinel.v
768 assert unittest.mock.sentinel.k not in repo.env
769
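A minimal sketch of the contract this test enforces, assuming env is backed by a private dict; the real GitUbuntuRepository may implement it differently:

class _EnvExample:
    def __init__(self):
        self._env = {'HOME': '/tmp'}

    @property
    def env(self):
        # Hand out a copy so callers mutating the result cannot poison
        # later reads of the property.
        return dict(self._env)

example = _EnvExample()
example.env['INJECTED'] = 'x'
assert 'INJECTED' not in example.env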
770
771@pytest.mark.parametrize(
772 'description, input_data, old_ubuntu, new_debian, expected',
773 [
774 (
775 'Common case',
776 Repo(
777 commits=[
778 Commit.from_spec(
779 name='old/debian'
780 ),
781 Commit.from_spec(
782 parents=[Placeholder('old/debian')],
783 name='old/ubuntu',
784 changelog_versions=['1-1ubuntu1', '1-1'],
785 ),
786 Commit.from_spec(
787 parents=[Placeholder('old/debian')],
788 name='new/debian',
789 changelog_versions=['2-1', '1-1'],
790 ),
791 ],
792 tags={
793 'pkg/import/1-1': Placeholder('old/debian'),
794 'pkg/import/1-1ubuntu1': Placeholder('old/ubuntu'),
795 'pkg/import/2-1': Placeholder('new/debian'),
796 },
797 ),
798 'pkg/import/1-1ubuntu1',
799 'pkg/import/2-1',
800 'pkg/import/1-1',
801 ),
802 (
803 'Ubuntu delta based on a NMU',
804 Repo(
805 commits=[
806 Commit.from_spec(
807 name='fork_point'
808 ),
809 Commit.from_spec(
810 parents=[Placeholder('fork_point')],
811 name='old/debian',
812 changelog_versions=['1-1.1', '1-1'],
813 ),
814 Commit.from_spec(
815 parents=[Placeholder('old/debian')],
816 name='old/ubuntu',
817 changelog_versions=['1-1.1ubuntu1', '1-1.1', '1-1'],
818 ),
819 Commit.from_spec(
820 parents=[Placeholder('fork_point')],
821 name='new/debian',
822 changelog_versions=['2-1', '1-1'],
823 ),
824 ],
825 tags={
826 'pkg/import/1-1': Placeholder('fork_point'),
827 'pkg/import/1-1.1': Placeholder('old/debian'),
828 'pkg/import/1-1.1ubuntu1': Placeholder('old/ubuntu'),
829 'pkg/import/2-1': Placeholder('new/debian'),
830 },
831 ),
832 'pkg/import/1-1.1ubuntu1',
833 'pkg/import/2-1',
834 'pkg/import/1-1.1',
835 ),
836 (
837            'Ubuntu upstream version ahead of Debian',
838 Repo(
839 commits=[
840 Commit.from_spec(
841 name='old/debian'
842 ),
843 Commit.from_spec(
844 parents=[Placeholder('old/debian')],
845 name='mid_ubuntu',
846 changelog_versions=['1-1ubuntu1', '1-1'],
847 ),
848 Commit.from_spec(
849 parents=[Placeholder('mid_ubuntu')],
850 name='old/ubuntu',
851 changelog_versions=['2-0ubuntu1', '1-1ubuntu1', '1-1'],
852 ),
853 Commit.from_spec(
854 parents=[Placeholder('old/debian')],
855 name='new/debian',
856 changelog_versions=['3-1', '1-1'],
857 ),
858 ],
859 tags={
860 'pkg/import/1-1': Placeholder('old/debian'),
861 'pkg/import/1-1ubuntu1': Placeholder('mid_ubuntu'),
862 'pkg/import/2-0ubuntu1': Placeholder('old/ubuntu'),
863 'pkg/import/3-1': Placeholder('new/debian'),
864 },
865 ),
866 'pkg/import/2-0ubuntu1',
867 'pkg/import/3-1',
868 'pkg/import/1-1',
869 ),
870 ],
871)
872def test_repo_find_ubuntu_merge(
873 description,
874 repo,
875 input_data,
876 old_ubuntu,
877 new_debian,
878 expected,
879):
880 input_data.write(repo.raw_repo)
881 merge_base = repo.find_ubuntu_merge_base(old_ubuntu)
882
883 assert merge_base
884
885 assert str(
886 repo.get_commitish(merge_base).peel(pygit2.Commit).id
887 ) == str(
888 repo.get_commitish(expected).peel(pygit2.Commit).id
889 )
890
891
892def test_repo_does_cleanup():
893 path = tempfile.mkdtemp()
894 try:
895 repo = target.GitUbuntuRepository(
896 path,
897 delete_on_close=True,
898 )
899 repo.close()
900 assert not os.path.exists(path)
901 finally:
902 shutil.rmtree(path, ignore_errors=True)
903
904
905def test_repo_does_not_cleanup():
906 path = tempfile.mkdtemp()
907 try:
908 repo = target.GitUbuntuRepository(
909 path,
910 delete_on_close=False,
911 )
912 repo.close()
913 assert os.path.exists(path)
914 finally:
915 shutil.rmtree(path, ignore_errors=True)
916
917
918@pytest.mark.parametrize(
919 [
920 'year',
921 'month',
922 'day',
923 'hours',
924 'minutes',
925 'seconds',
926 'milliseconds',
927 'hour_delta',
928 'expected',
929 ], [
930 (1970, 1, 1, 0, 0, 0, 0, 0, (0, 0)),
931 (1970, 1, 1, 0, 0, 0, 600, 0, (0, 0)),
932 (1970, 1, 1, 1, 0, 0, 0, 1, (0, 60)),
933 (1970, 1, 1, 0, 0, 0, 0, -1, (3600, -60)),
934 (1971, 2, 3, 4, 5, 6, 7, -8, (34430706, -480)),
935 ]
936)
937def test_datetime_to_signature_spec(
938 year,
939 month,
The diff has been truncated for viewing.
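For reference, the expected values in the final row of the parametrize table above can be reproduced with the standard library, ignoring the milliseconds field, which the second row shows is discarded:

import datetime

tz = datetime.timezone(datetime.timedelta(hours=-8))
dt = datetime.datetime(1971, 2, 3, 4, 5, 6, tzinfo=tz)
assert int(dt.timestamp()) == 34430706                # seconds since the epoch (UTC)
assert dt.utcoffset() // datetime.timedelta(minutes=1) == -480   # offset in minutes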
